Oct 13 21:11:21 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 13 21:11:22 crc restorecon[4678]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 13 21:11:22 crc restorecon[4678]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc 
restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 13 21:11:22 crc 
restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 
21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 13 21:11:22 crc 
restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 
21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 13 21:11:22 crc restorecon[4678]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Oct 13 21:11:23 crc kubenswrapper[4689]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 13 21:11:23 crc kubenswrapper[4689]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Oct 13 21:11:23 crc kubenswrapper[4689]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 13 21:11:23 crc kubenswrapper[4689]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
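The deprecation warnings around this point all say the same thing: move the flag into the config file passed to the kubelet via --config. A minimal sketch of what that migration could look like, under the assumption that the cluster uses the public kubelet.config.k8s.io/v1beta1 schema; every value below is a hypothetical placeholder, not taken from this node, and the file is rendered as JSON purely for illustration:

    # Sketch (assumed field names from KubeletConfiguration v1beta1; all
    # values are hypothetical placeholders, not read from this cluster).
    import json

    kubelet_config = {
        "apiVersion": "kubelet.config.k8s.io/v1beta1",
        "kind": "KubeletConfiguration",
        # replaces --container-runtime-endpoint
        "containerRuntimeEndpoint": "unix:///var/run/crio/crio.sock",
        # replaces --volume-plugin-dir
        "volumePluginDir": "/etc/kubernetes/kubelet-plugins/volume/exec",
        # replaces --register-with-taints
        "registerWithTaints": [
            {"key": "node-role.kubernetes.io/master", "effect": "NoSchedule"}
        ],
        # replaces --system-reserved (warned about just below)
        "systemReserved": {"cpu": "500m", "memory": "1Gi"},
        # --minimum-container-ttl-duration is superseded by eviction settings
        "evictionHard": {"memory.available": "100Mi"},
    }

    print(json.dumps(kubelet_config, indent=2))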
Oct 13 21:11:23 crc kubenswrapper[4689]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 13 21:11:23 crc kubenswrapper[4689]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.551614 4689 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565251 4689 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565329 4689 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565343 4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565353 4689 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565364 4689 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565377 4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565387 4689 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565404 4689 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565414 4689 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565423 4689 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565432 4689 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565441 4689 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565449 4689 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565458 4689 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565467 4689 feature_gate.go:330] unrecognized feature gate: Example Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565476 4689 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565485 4689 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565494 4689 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565502 4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565511 4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565524 4689 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 13 
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565533    4689 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565542    4689 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565551    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565561    4689 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565570    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565579    4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565621    4689 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565630    4689 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565642    4689 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565655    4689 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565665    4689 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565681    4689 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565690    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565699    4689 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565708    4689 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565717    4689 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565726    4689 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565734    4689 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565743    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565751    4689 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565760    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565769    4689 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565777    4689 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565797    4689 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565810    4689 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565820    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565830    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565839    4689 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565848    4689 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565857    4689 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565868    4689 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565880    4689 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565891    4689 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565904    4689 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565915    4689 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565929    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565942    4689 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565954    4689 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565974    4689 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565988    4689 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.565997    4689 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566007    4689 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566016    4689 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566025    4689 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566035    4689 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566044    4689 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566058    4689 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566067    4689 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566076    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.566084    4689 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566365    4689 flags.go:64] FLAG: --address="0.0.0.0"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566393    4689 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566416    4689 flags.go:64] FLAG: --anonymous-auth="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566433    4689 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566460    4689 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566474    4689 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566489    4689 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566502    4689 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566512    4689 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566523    4689 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566534    4689 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566546    4689 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566563    4689 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566573    4689 flags.go:64] FLAG: --cgroup-root=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566614    4689 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566625    4689 flags.go:64] FLAG: --client-ca-file=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566636    4689 flags.go:64] FLAG: --cloud-config=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566647    4689 flags.go:64] FLAG: --cloud-provider=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566658    4689 flags.go:64] FLAG: --cluster-dns="[]"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566670    4689 flags.go:64] FLAG: --cluster-domain=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566686    4689 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566699    4689 flags.go:64] FLAG: --config-dir=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566709    4689 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566720    4689 flags.go:64] FLAG: --container-log-max-files="5"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566735    4689 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566745    4689 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566755    4689 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566765    4689 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566777    4689 flags.go:64] FLAG: --contention-profiling="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566792    4689 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566802    4689 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566813    4689 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566823    4689 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566836    4689 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566846    4689 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566856    4689 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566866    4689 flags.go:64] FLAG: --enable-load-reader="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566882    4689 flags.go:64] FLAG: --enable-server="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566892    4689 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566905    4689 flags.go:64] FLAG: --event-burst="100"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566915    4689 flags.go:64] FLAG: --event-qps="50"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566925    4689 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566935    4689 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566945    4689 flags.go:64] FLAG: --eviction-hard=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566957    4689 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566967    4689 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566983    4689 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.566993    4689 flags.go:64] FLAG: --eviction-soft=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567003    4689 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567013    4689 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567024    4689 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567034    4689 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567044    4689 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567054    4689 flags.go:64] FLAG: --fail-swap-on="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567070    4689 flags.go:64] FLAG: --feature-gates=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567082    4689 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567092    4689 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567103    4689 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567113    4689 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567124    4689 flags.go:64] FLAG: --healthz-port="10248"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567135    4689 flags.go:64] FLAG: --help="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567145    4689 flags.go:64] FLAG: --hostname-override=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567160    4689 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567171    4689 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567182    4689 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567193    4689 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567203    4689 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567212    4689 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567222    4689 flags.go:64] FLAG: --image-service-endpoint=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567232    4689 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567242    4689 flags.go:64] FLAG: --kube-api-burst="100"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567257    4689 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567267    4689 flags.go:64] FLAG: --kube-api-qps="50"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567277    4689 flags.go:64] FLAG: --kube-reserved=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567287    4689 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567297    4689 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567307    4689 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567317    4689 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567327    4689 flags.go:64] FLAG: --lock-file=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567336    4689 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567352    4689 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567362    4689 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567378    4689 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567388    4689 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567398    4689 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567408    4689 flags.go:64] FLAG: --logging-format="text"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567418    4689 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567428    4689 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567445    4689 flags.go:64] FLAG: --manifest-url=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567457    4689 flags.go:64] FLAG: --manifest-url-header=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567485    4689 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567506    4689 flags.go:64] FLAG: --max-open-files="1000000"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567528    4689 flags.go:64] FLAG: --max-pods="110"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567542    4689 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567555    4689 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567567    4689 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567621    4689 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567634    4689 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567645    4689 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567655    4689 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567681    4689 flags.go:64] FLAG: --node-status-max-images="50"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567691    4689 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567701    4689 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567720    4689 flags.go:64] FLAG: --pod-cidr=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567729    4689 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567744    4689 flags.go:64] FLAG: --pod-manifest-path=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567754    4689 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567764    4689 flags.go:64] FLAG: --pods-per-core="0"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567774    4689 flags.go:64] FLAG: --port="10250"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567785    4689 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567795    4689 flags.go:64] FLAG: --provider-id=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567804    4689 flags.go:64] FLAG: --qos-reserved=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567820    4689 flags.go:64] FLAG: --read-only-port="10255"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567830    4689 flags.go:64] FLAG: --register-node="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567839    4689 flags.go:64] FLAG: --register-schedulable="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567851    4689 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567870    4689 flags.go:64] FLAG: --registry-burst="10"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567880    4689 flags.go:64] FLAG: --registry-qps="5"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567890    4689 flags.go:64] FLAG: --reserved-cpus=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567905    4689 flags.go:64] FLAG: --reserved-memory=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567918    4689 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567928    4689 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567939    4689 flags.go:64] FLAG: --rotate-certificates="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567948    4689 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567959    4689 flags.go:64] FLAG: --runonce="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567968    4689 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567979    4689 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.567994    4689 flags.go:64] FLAG: --seccomp-default="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568004    4689 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568014    4689 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568024    4689 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568034    4689 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568046    4689 flags.go:64] FLAG: --storage-driver-password="root"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568056    4689 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568066    4689 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568076    4689 flags.go:64] FLAG: --storage-driver-user="root"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568091    4689 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568102    4689 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568112    4689 flags.go:64] FLAG: --system-cgroups=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568122    4689 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568137    4689 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568147    4689 flags.go:64] FLAG: --tls-cert-file=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568157    4689 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568174    4689 flags.go:64] FLAG: --tls-min-version=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568184    4689 flags.go:64] FLAG: --tls-private-key-file=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568193    4689 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568203    4689 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568214    4689 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568226    4689 flags.go:64] FLAG: --v="2"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568261    4689 flags.go:64] FLAG: --version="false"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568277    4689 flags.go:64] FLAG: --vmodule=""
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568301    4689 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.568315    4689 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568766    4689 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568782    4689 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568794    4689 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568804    4689 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568820    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568829    4689 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568839    4689 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568850    4689 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568863    4689 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568873    4689 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568882    4689 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568894    4689 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568906    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568915    4689 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568925    4689 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568937    4689 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568952    4689 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568962    4689 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568972    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568981    4689 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568989    4689 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.568998    4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569007    4689 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569015    4689 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569024    4689 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569032    4689 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569042    4689 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569051    4689 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569059    4689 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569083    4689 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569092    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569101    4689 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569109    4689 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569118    4689 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569127    4689 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569136    4689 feature_gate.go:330] unrecognized feature gate: Example
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569145    4689 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569153    4689 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569161    4689 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569173    4689 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569183    4689 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569199    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569208    4689 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569219    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569228    4689 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569237    4689 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569246    4689 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569255    4689 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569263    4689 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569272    4689 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569280    4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569290    4689 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569299    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569307    4689 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569321    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569330    4689 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569339    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569350    4689 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569361    4689 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569370    4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569379    4689 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569391    4689 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569400    4689 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569409    4689 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569417    4689 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569426    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569440    4689 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569450    4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569462    4689 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569474    4689 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.569484    4689 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.570720    4689 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.591084    4689 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.591156    4689 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591462    4689 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591491    4689 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591503    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591522    4689 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591538    4689 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591551    4689 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591568    4689 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591636    4689 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591646    4689 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591657    4689 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591668    4689 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591677    4689 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591687    4689 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591696    4689 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591706    4689 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591722    4689 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591731    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591741    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591750    4689 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591759    4689 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591768    4689 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591778    4689 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591788    4689 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591796    4689 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591805    4689 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591814    4689 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591823    4689 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591832    4689 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591846    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591854    4689 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591863    4689 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591872    4689 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591881    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591890    4689 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591899    4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591907    4689 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591919    4689 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591934    4689 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591960    4689 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591971    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591987    4689 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.591998    4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592011    4689 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592022    4689 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592031    4689 feature_gate.go:330] unrecognized feature gate: Example
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592042    4689 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592051    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592060    4689 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592071    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592080    4689 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592089    4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592100    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592109    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592141    4689 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592151    4689 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592162    4689 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592173    4689 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592184    4689 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592196    4689 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592209    4689 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592221    4689 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592236    4689 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592250    4689 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592261    4689 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592271    4689 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592287    4689 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592296    4689 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592309    4689 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592337    4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592346    4689 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.592355    4689 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.592371    4689 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599284    4689 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599392    4689 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599430    4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599444    4689 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599466    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599485    4689 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599497    4689 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599506    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599534    4689 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599550    4689 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599568    4689 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599615    4689 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599624    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599632    4689 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599648    4689 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599657    4689 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599665    4689 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599682    4689 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599691    4689 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599708    4689 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599716    4689 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599724    4689 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599741    4689 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599756    4689 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599765    4689 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599784    4689 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599794    4689 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599802    4689 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599811    4689 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599818    4689 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599826    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599836    4689 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599844    4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599855    4689 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599867    4689 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599878    4689 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599887    4689 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599898    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599907    4689 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599918    4689 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599927    4689 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599940    4689 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599949    4689 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599958    4689 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599968    4689 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599978    4689 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599987    4689 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.599996    4689 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600007    4689 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600018    4689 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600027    4689 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600036    4689 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600044    4689 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600053    4689 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600061    4689 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600072    4689 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600083    4689 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600093    4689 feature_gate.go:330] unrecognized feature gate: Example
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600103    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600115    4689 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600127    4689 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600138    4689 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600149    4689 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600159    4689 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600169    4689 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600182    4689 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600192    4689 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600200    4689 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600208    4689 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600215    4689 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.600223    4689 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.600705 4689 server.go:940] "Client rotation is on, will bootstrap in background" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.607722 4689 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.607896 4689 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.611948 4689 server.go:997] "Starting client certificate rotation" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.611992 4689 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.613104 4689 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-29 14:02:04.715163725 +0000 UTC Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.613297 4689 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1120h50m41.101872653s for next certificate rotation Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.644362 4689 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.648087 4689 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.668862 4689 log.go:25] "Validated CRI v1 runtime API" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.712882 4689 log.go:25] "Validated CRI v1 image API" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.716625 4689 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.723540 4689 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-13-21-06-46-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.723629 4689 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.752033 4689 manager.go:217] Machine: {Timestamp:2025-10-13 21:11:23.747955077 +0000 UTC m=+0.666201082 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:3639096d-e021-4f30-b44c-3e32b233f5a5 BootID:5dcf8212-d44f-4948-b8a0-e9c7ca68ad65 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 
Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:81:25:13 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:81:25:13 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:da:7e:41 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:80:88:c7 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:63:ff:3e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:c7:63:35 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:2e:46:18:87:46:71 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:3a:48:21:7c:f3:71 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.752559 4689 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.752854 4689 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.753379 4689 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.753704 4689 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.753772 4689 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.754491 4689 topology_manager.go:138] "Creating 
topology manager with none policy" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.754511 4689 container_manager_linux.go:303] "Creating device plugin manager" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.755525 4689 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.755741 4689 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.757269 4689 state_mem.go:36] "Initialized new in-memory state store" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.757432 4689 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.761820 4689 kubelet.go:418] "Attempting to sync node with API server" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.761856 4689 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.761883 4689 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.761905 4689 kubelet.go:324] "Adding apiserver pod source" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.761947 4689 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.773978 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.774263 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.773996 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.774299 4689 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.774337 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.775694 4689 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
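The "connection refused" reflector warnings above are expected on a cold single-node start: the kubelet comes up before the API server it watches, so each informer's initial List against https://api-int.crc.testing:6443 fails and is retried with backoff until the apiserver static pod is serving. A sketch of that retry loop, under stated assumptions: client-go's Reflector implements the real behavior, and the probe, delays, and cap below are illustrative, with only the endpoint taken from the log.

    package main

    import (
            "fmt"
            "net"
            "time"
    )

    // listNodesOnce stands in for the reflector's initial List call; a
    // plain TCP dial reproduces the "connect: connection refused" error
    // seen while the API server is still starting.
    func listNodesOnce(endpoint string) error {
            conn, err := net.DialTimeout("tcp", endpoint, 2*time.Second)
            if err != nil {
                    return err
            }
            conn.Close()
            return nil // a real client would now GET /api/v1/nodes?...
    }

    func main() {
            backoff := time.Second
            for {
                    if err := listNodesOnce("api-int.crc.testing:6443"); err != nil {
                            fmt.Printf("W failed to list *v1.Node: %v; retrying in %s\n", err, backoff)
                            time.Sleep(backoff)
                            if backoff < 30*time.Second {
                                    backoff *= 2 // capped exponential backoff
                            }
                            continue
                    }
                    fmt.Println("I initial list succeeded, starting watch")
                    return
            }
    }

Once the apiserver answers, the List succeeds, the watch starts, and these warnings stop; the same pattern explains the lease, event, and CSINode errors further down.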
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.777331 4689 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779155 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779190 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779200 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779209 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779226 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779238 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779250 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779267 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779281 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779294 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779321 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.779332 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.780681 4689 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.781353 4689 server.go:1280] "Started kubelet" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.782132 4689 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.783061 4689 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.783092 4689 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.784102 4689 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 13 21:11:23 crc systemd[1]: Started Kubernetes Kubelet. 
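The block of "Loaded volume plugin" lines just above shows the kubelet populating its in-tree volume-plugin registry at startup: each plugin registers under a unique kubernetes.io/... name, and the volume manager later resolves a pod's volume source to one of them. A reduced sketch of such a name-keyed registry, assuming only what the log shows: the plugin names are real, but the actual interface (volume.VolumePlugin, with mount/unmount hooks) is cut down here to registration and lookup.

    package main

    import (
            "errors"
            "fmt"
    )

    // registry keys plugins by their unique kubernetes.io/... name.
    type registry map[string]struct{}

    // register rejects duplicate names, mirroring the uniqueness the
    // real plugin manager enforces, and logs the load.
    func (r registry) register(name string) error {
            if _, dup := r[name]; dup {
                    return fmt.Errorf("volume plugin %q registered more than once", name)
            }
            r[name] = struct{}{}
            fmt.Printf("I Loaded volume plugin %q\n", name)
            return nil
    }

    // find is the lookup the volume manager performs when a pod spec
    // references a volume type.
    func (r registry) find(name string) error {
            if _, ok := r[name]; !ok {
                    return errors.New("no volume plugin matched")
            }
            return nil
    }

    func main() {
            r := registry{}
            for _, name := range []string{
                    "kubernetes.io/empty-dir",
                    "kubernetes.io/configmap",
                    "kubernetes.io/secret",
                    "kubernetes.io/projected",
                    "kubernetes.io/csi",
            } {
                    _ = r.register(name)
            }
            fmt.Println(r.find("kubernetes.io/empty-dir")) // <nil>
            fmt.Println(r.find("kubernetes.io/rbd"))       // no volume plugin matched
    }

The "Volume is marked as uncertain" reconstruction lines that follow are the consumer side of this registry: after a restart the kubelet rebuilds its actual state of the world from the mounts found on disk, resolving each one back to a registered plugin before it can safely clean up or remount.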
Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.785737 4689 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.785795 4689 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.786145 4689 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 04:14:46.560181335 +0000 UTC Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.786207 4689 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 607h3m22.773978579s for next certificate rotation Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.786852 4689 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.786995 4689 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.787046 4689 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.787074 4689 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.787861 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.788035 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.788949 4689 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.21:6443: connect: connection refused" interval="200ms" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.789388 4689 factory.go:55] Registering systemd factory Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.789412 4689 server.go:460] "Adding debug handlers to kubelet server" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.789418 4689 factory.go:221] Registration of the systemd container factory successfully Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.790463 4689 factory.go:153] Registering CRI-O factory Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.790517 4689 factory.go:221] Registration of the crio container factory successfully Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.790696 4689 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.790774 4689 factory.go:103] Registering Raw factory Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.790816 4689 manager.go:1196] Started watching for new ooms in manager Oct 13 21:11:23 crc kubenswrapper[4689]: 
E1013 21:11:23.790383 4689 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.21:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186e2945b8f9ffbf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-13 21:11:23.781316543 +0000 UTC m=+0.699561638,LastTimestamp:2025-10-13 21:11:23.781316543 +0000 UTC m=+0.699561638,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.793377 4689 manager.go:319] Starting recovery of all containers Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.801485 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.802327 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.805708 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.805908 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.806017 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.806753 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.806958 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.807072 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: 
I1013 21:11:23.807289 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811516 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811580 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811637 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811660 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811694 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811752 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811775 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811797 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811820 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811842 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811864 4689 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811887 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811912 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811934 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811958 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.811981 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812004 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812055 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812078 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812101 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812124 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812145 4689 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812165 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812190 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812212 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812236 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812258 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812281 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812303 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812327 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812351 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812374 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812396 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812419 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812444 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812466 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812492 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812552 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812574 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812631 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812656 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812679 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812712 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812752 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812777 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812800 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812824 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812858 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812882 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812905 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812933 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.812955 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813045 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813070 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813093 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813118 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813142 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813167 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813191 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813211 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813235 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813262 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813283 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813305 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813327 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813349 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813372 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813396 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813420 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813442 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813463 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813486 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813510 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813530 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813553 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813573 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813622 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813645 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813670 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813693 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813715 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813736 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813758 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813781 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813806 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813831 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813852 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813874 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813896 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813919 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813941 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813963 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.813984 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814007 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814062 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814102 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814128 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814153 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814180 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814203 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814229 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814253 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814278 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814300 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814323 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814346 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814368 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814389 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814420 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814444 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814467 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814492 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814515 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814544 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814566 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814613 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814641 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814662 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814685 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814708 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814731 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814753 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814774 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814797 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814819 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814841 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814863 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814885 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814906 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814927 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814948 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814973 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.814998 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815051 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815076 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815098 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815126 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815149 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815172 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815193 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815215 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815234 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815255 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815277 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815299 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815319 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815341 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815362 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815435 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815458 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815480 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815502 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815525 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815546 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815566 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815609 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815631 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815650 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815672 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815722 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815745 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815768 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815790 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815811 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815834 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.815864 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823459 4689 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823551 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823626 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823652 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823676 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823710 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823734 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823807 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.823829 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.824317 4689 manager.go:324] Recovery completed Oct 
13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.824866 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.824980 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.825043 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.825219 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.825307 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.825788 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.825912 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826138 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826323 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826405 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826475 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 
21:11:23.826544 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826627 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826696 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826763 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.826824 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.827654 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.827737 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.827819 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.827883 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.828188 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.828430 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.828784 4689 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.829902 4689 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.829936 4689 reconstruct.go:97] "Volume reconstruction finished" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.829947 4689 reconciler.go:26] "Reconciler: start to sync state" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.837524 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.839497 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.839549 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.839564 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.841699 4689 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.841723 4689 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.841744 4689 state_mem.go:36] "Initialized new in-memory state store" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.862430 4689 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.865999 4689 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.866112 4689 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.866166 4689 kubelet.go:2335] "Starting kubelet main sync loop" Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.866255 4689 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.866806 4689 policy_none.go:49] "None policy: Start" Oct 13 21:11:23 crc kubenswrapper[4689]: W1013 21:11:23.869810 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.869938 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.870491 4689 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.870518 4689 state_mem.go:35] "Initializing new in-memory state store" Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.887086 4689 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.893899 4689 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.21:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186e2945b8f9ffbf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-13 21:11:23.781316543 +0000 UTC m=+0.699561638,LastTimestamp:2025-10-13 21:11:23.781316543 +0000 UTC m=+0.699561638,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.926521 4689 manager.go:334] "Starting Device Plugin manager" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.927392 4689 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.927428 4689 server.go:79] "Starting device plugin registration server" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.927961 4689 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.927983 4689 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.928399 4689 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 13 21:11:23 crc 
kubenswrapper[4689]: I1013 21:11:23.928561 4689 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.928576 4689 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.934668 4689 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.966814 4689 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.966908 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.968059 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.968185 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.968330 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.968694 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.968891 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.968933 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970155 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970314 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970408 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970159 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970567 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970620 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970784 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.970962 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.971030 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.972715 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.972745 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.972765 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.973228 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.973351 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.973374 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.973865 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.974468 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.974529 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.976624 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.976669 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.976625 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.976727 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.976688 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.976793 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.977008 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.977270 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.977406 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978059 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978157 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978238 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978570 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978718 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978882 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978948 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.978967 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.979643 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.979738 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:23 crc kubenswrapper[4689]: I1013 21:11:23.979830 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:23 crc kubenswrapper[4689]: E1013 21:11:23.990137 4689 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.21:6443: connect: connection refused" interval="400ms" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.028657 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.030159 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.030200 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.030211 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.030274 4689 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 21:11:24 crc kubenswrapper[4689]: E1013 21:11:24.030904 4689 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.21:6443: connect: 
connection refused" node="crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033120 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033196 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033243 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033279 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033315 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033400 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033473 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033556 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033675 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc 
kubenswrapper[4689]: I1013 21:11:24.033726 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033762 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033833 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033874 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033922 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.033998 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135033 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135103 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135128 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135161 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135184 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135205 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135226 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135277 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135300 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135329 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135348 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135371 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135391 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135412 4689 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135446 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.135980 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136072 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136112 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136113 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136149 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136190 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136170 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136080 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc 
kubenswrapper[4689]: I1013 21:11:24.136216 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136234 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136327 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136343 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136330 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136358 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.136618 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.231436 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.233221 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.233261 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.233272 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.233297 4689 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 21:11:24 crc kubenswrapper[4689]: E1013 21:11:24.233750 4689 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.21:6443: connect: connection refused" node="crc" Oct 
13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.306800 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.314072 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.319310 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.358313 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: W1013 21:11:24.363151 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-3e0dd782d6aee8eb3262ae9c82bca058dbc27d6c4fcb7b5f6631bc283ac33363 WatchSource:0}: Error finding container 3e0dd782d6aee8eb3262ae9c82bca058dbc27d6c4fcb7b5f6631bc283ac33363: Status 404 returned error can't find the container with id 3e0dd782d6aee8eb3262ae9c82bca058dbc27d6c4fcb7b5f6631bc283ac33363 Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.364255 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:24 crc kubenswrapper[4689]: W1013 21:11:24.364718 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a0669d54d0c917090dcea7e7a83fbcce61fb5559d1923ee015fffcb62c84618b WatchSource:0}: Error finding container a0669d54d0c917090dcea7e7a83fbcce61fb5559d1923ee015fffcb62c84618b: Status 404 returned error can't find the container with id a0669d54d0c917090dcea7e7a83fbcce61fb5559d1923ee015fffcb62c84618b Oct 13 21:11:24 crc kubenswrapper[4689]: W1013 21:11:24.366239 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-f294ac8b2c1e0e70ff01877b0891c3ff6874aac80ffaf3d6f121cfe4644e35db WatchSource:0}: Error finding container f294ac8b2c1e0e70ff01877b0891c3ff6874aac80ffaf3d6f121cfe4644e35db: Status 404 returned error can't find the container with id f294ac8b2c1e0e70ff01877b0891c3ff6874aac80ffaf3d6f121cfe4644e35db Oct 13 21:11:24 crc kubenswrapper[4689]: W1013 21:11:24.386069 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-870fdb4b22c1e32ef246fd5592f4d746ac813f1b2f6fd055dfd0fe2a68162f51 WatchSource:0}: Error finding container 870fdb4b22c1e32ef246fd5592f4d746ac813f1b2f6fd055dfd0fe2a68162f51: Status 404 returned error can't find the container with id 870fdb4b22c1e32ef246fd5592f4d746ac813f1b2f6fd055dfd0fe2a68162f51 Oct 13 21:11:24 crc kubenswrapper[4689]: E1013 21:11:24.391828 4689 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.21:6443: connect: connection refused" interval="800ms" Oct 13 21:11:24 crc kubenswrapper[4689]: W1013 21:11:24.588513 4689 reflector.go:561] 
k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:24 crc kubenswrapper[4689]: E1013 21:11:24.589321 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.634898 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.636933 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.636990 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.637003 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.637038 4689 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 21:11:24 crc kubenswrapper[4689]: E1013 21:11:24.637619 4689 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.21:6443: connect: connection refused" node="crc" Oct 13 21:11:24 crc kubenswrapper[4689]: W1013 21:11:24.727536 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:24 crc kubenswrapper[4689]: E1013 21:11:24.727678 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.783857 4689 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.875165 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f294ac8b2c1e0e70ff01877b0891c3ff6874aac80ffaf3d6f121cfe4644e35db"} Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.876249 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3e0dd782d6aee8eb3262ae9c82bca058dbc27d6c4fcb7b5f6631bc283ac33363"} Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.877607 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a0669d54d0c917090dcea7e7a83fbcce61fb5559d1923ee015fffcb62c84618b"} Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.878655 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"870fdb4b22c1e32ef246fd5592f4d746ac813f1b2f6fd055dfd0fe2a68162f51"} Oct 13 21:11:24 crc kubenswrapper[4689]: I1013 21:11:24.879523 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ead77b51c6ccbe1b013b6c5956d60f9c3dc2948c453f4413539b965cbe37580d"} Oct 13 21:11:25 crc kubenswrapper[4689]: E1013 21:11:25.193235 4689 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.21:6443: connect: connection refused" interval="1.6s" Oct 13 21:11:25 crc kubenswrapper[4689]: W1013 21:11:25.217806 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:25 crc kubenswrapper[4689]: E1013 21:11:25.217957 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:25 crc kubenswrapper[4689]: W1013 21:11:25.358048 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:25 crc kubenswrapper[4689]: E1013 21:11:25.358182 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.438203 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.440013 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.440062 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.440076 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.440107 4689 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 21:11:25 crc kubenswrapper[4689]: E1013 21:11:25.441570 4689 kubelet_node_status.go:99] 
"Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.21:6443: connect: connection refused" node="crc" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.784204 4689 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.886389 4689 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d" exitCode=0 Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.886518 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d"} Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.886619 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.888804 4689 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276" exitCode=0 Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.888900 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276"} Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.888983 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.889147 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.889208 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.889226 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.890326 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.890416 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.890447 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.891135 4689 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6" exitCode=0 Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.891202 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6"} Oct 13 21:11:25 crc 
kubenswrapper[4689]: I1013 21:11:25.891331 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.891734 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.892896 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.892949 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.892967 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.898030 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.898080 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.898097 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.900263 4689 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de" exitCode=0 Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.900547 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.900701 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de"} Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.902253 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.902279 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.902291 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.905716 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115"} Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.905783 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e"} Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.905808 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9"} Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.905832 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851"} Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.905808 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.907095 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.907129 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:25 crc kubenswrapper[4689]: I1013 21:11:25.907142 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:26 crc kubenswrapper[4689]: W1013 21:11:26.382262 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:26 crc kubenswrapper[4689]: E1013 21:11:26.383279 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.783474 4689 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:26 crc kubenswrapper[4689]: E1013 21:11:26.794603 4689 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.21:6443: connect: connection refused" interval="3.2s" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.914997 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.915085 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.915119 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.915143 4689 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.918712 4689 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa" exitCode=0 Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.918771 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.918860 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.924231 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.924283 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.924298 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.926287 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.926424 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.927509 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.927538 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.927553 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.929932 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.930095 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.930321 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.930358 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.930373 4689 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61"} Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.931073 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.931102 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.931114 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.932057 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.932085 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:26 crc kubenswrapper[4689]: I1013 21:11:26.932096 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.042181 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.046238 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.046289 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.046303 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.046337 4689 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 21:11:27 crc kubenswrapper[4689]: E1013 21:11:27.047017 4689 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.21:6443: connect: connection refused" node="crc" Oct 13 21:11:27 crc kubenswrapper[4689]: W1013 21:11:27.063704 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.21:6443: connect: connection refused Oct 13 21:11:27 crc kubenswrapper[4689]: E1013 21:11:27.063778 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.21:6443: connect: connection refused" logger="UnhandledError" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.938817 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd"} Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.939003 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.942077 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.942138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.942159 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.943720 4689 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77" exitCode=0 Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.943806 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77"} Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.943911 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.943938 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.944010 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.944159 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.945816 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.945887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.945909 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.945824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.945993 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.946033 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.946362 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.946440 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:27 crc kubenswrapper[4689]: I1013 21:11:27.946465 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.955949 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06"} Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.956569 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684"} Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.956041 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.956718 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.956632 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1"} Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.958969 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.959058 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:28 crc kubenswrapper[4689]: I1013 21:11:28.959086 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:29 crc kubenswrapper[4689]: I1013 21:11:29.966953 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575"} Oct 13 21:11:29 crc kubenswrapper[4689]: I1013 21:11:29.967033 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0"} Oct 13 21:11:29 crc kubenswrapper[4689]: I1013 21:11:29.967207 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:29 crc kubenswrapper[4689]: I1013 21:11:29.968850 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:29 crc kubenswrapper[4689]: I1013 21:11:29.968926 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:29 crc kubenswrapper[4689]: I1013 21:11:29.968947 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.247190 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.249312 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.249358 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.249372 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:30 crc 
kubenswrapper[4689]: I1013 21:11:30.249405 4689 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.265358 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.265703 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.267462 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.267549 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.267577 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.635681 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.970222 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.970303 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.972279 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.972344 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.972363 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.972372 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.972423 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:30 crc kubenswrapper[4689]: I1013 21:11:30.972437 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.367881 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.368197 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.368266 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.370151 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.370215 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.370328 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.710433 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.973793 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.974823 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.976340 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.976406 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:31 crc kubenswrapper[4689]: I1013 21:11:31.976443 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.097754 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.097996 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.099775 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.100172 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.100657 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.105490 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.667196 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.667461 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.668977 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.669014 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.669027 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.918702 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.918946 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.920447 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 
21:11:32.920510 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.920528 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.976933 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.978098 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.978152 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:32 crc kubenswrapper[4689]: I1013 21:11:32.978167 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.460118 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.460432 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.462315 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.462396 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.462420 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.636782 4689 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.636912 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.849277 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 13 21:11:33 crc kubenswrapper[4689]: E1013 21:11:33.934808 4689 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.980354 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.982061 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.982156 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Oct 13 21:11:33 crc kubenswrapper[4689]: I1013 21:11:33.982184 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:36 crc kubenswrapper[4689]: I1013 21:11:36.562178 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 13 21:11:36 crc kubenswrapper[4689]: I1013 21:11:36.562494 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:36 crc kubenswrapper[4689]: I1013 21:11:36.564345 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:36 crc kubenswrapper[4689]: I1013 21:11:36.564407 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:36 crc kubenswrapper[4689]: I1013 21:11:36.564424 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:37 crc kubenswrapper[4689]: I1013 21:11:37.784517 4689 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 13 21:11:38 crc kubenswrapper[4689]: W1013 21:11:38.043388 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 13 21:11:38 crc kubenswrapper[4689]: I1013 21:11:38.043543 4689 trace.go:236] Trace[1537739429]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 21:11:28.041) (total time: 10001ms): Oct 13 21:11:38 crc kubenswrapper[4689]: Trace[1537739429]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:11:38.043) Oct 13 21:11:38 crc kubenswrapper[4689]: Trace[1537739429]: [10.001752473s] [10.001752473s] END Oct 13 21:11:38 crc kubenswrapper[4689]: E1013 21:11:38.043580 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 13 21:11:38 crc kubenswrapper[4689]: W1013 21:11:38.235734 4689 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 13 21:11:38 crc kubenswrapper[4689]: I1013 21:11:38.235870 4689 trace.go:236] Trace[743073894]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 21:11:28.234) (total time: 10001ms): Oct 13 21:11:38 crc kubenswrapper[4689]: Trace[743073894]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:11:38.235) Oct 13 21:11:38 crc kubenswrapper[4689]: Trace[743073894]: [10.00166828s] [10.00166828s] END Oct 13 21:11:38 crc kubenswrapper[4689]: E1013 21:11:38.235910 4689 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch 
*v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Oct 13 21:11:38 crc kubenswrapper[4689]: I1013 21:11:38.559900 4689 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 13 21:11:38 crc kubenswrapper[4689]: I1013 21:11:38.560021 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 13 21:11:38 crc kubenswrapper[4689]: I1013 21:11:38.576415 4689 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 13 21:11:38 crc kubenswrapper[4689]: I1013 21:11:38.576513 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 13 21:11:40 crc kubenswrapper[4689]: I1013 21:11:40.272840 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 13 21:11:40 crc kubenswrapper[4689]: I1013 21:11:40.273034 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 21:11:40 crc kubenswrapper[4689]: I1013 21:11:40.274337 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:11:40 crc kubenswrapper[4689]: I1013 21:11:40.274416 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:11:40 crc kubenswrapper[4689]: I1013 21:11:40.274436 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:11:41 crc kubenswrapper[4689]: I1013 21:11:41.720381 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 13 21:11:41 crc kubenswrapper[4689]: I1013 21:11:41.720713 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 21:11:41 crc kubenswrapper[4689]: I1013 21:11:41.722446 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:11:41 crc kubenswrapper[4689]: I1013 21:11:41.722528 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:11:41 crc kubenswrapper[4689]: I1013 21:11:41.722554 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:11:41 crc kubenswrapper[4689]: I1013 21:11:41.726738 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 13 21:11:42 crc kubenswrapper[4689]: I1013 21:11:42.003035 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 13 21:11:42 crc kubenswrapper[4689]: I1013 21:11:42.004494 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:11:42 crc kubenswrapper[4689]: I1013 21:11:42.004554 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:11:42 crc kubenswrapper[4689]: I1013 21:11:42.004571 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:11:42 crc kubenswrapper[4689]: I1013 21:11:42.626175 4689 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.562960 4689 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.570881 4689 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.590784 4689 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.590943 4689 trace.go:236] Trace[1605153519]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 21:11:32.331) (total time: 11259ms):
Oct 13 21:11:43 crc kubenswrapper[4689]: Trace[1605153519]: ---"Objects listed" error: 11259ms (21:11:43.590)
Oct 13 21:11:43 crc kubenswrapper[4689]: Trace[1605153519]: [11.259536357s] [11.259536357s] END
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.590975 4689 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.591446 4689 trace.go:236] Trace[198317661]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Oct-2025 21:11:33.221) (total time: 10369ms):
Oct 13 21:11:43 crc kubenswrapper[4689]: Trace[198317661]: ---"Objects listed" error: 10369ms (21:11:43.591)
Oct 13 21:11:43 crc kubenswrapper[4689]: Trace[198317661]: [10.369462483s] [10.369462483s] END
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.591495 4689 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.636881 4689 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.636972 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.692436 4689 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": EOF" start-of-body=
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.692504 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": EOF"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.692556 4689 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54460->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.692678 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54460->192.168.126.11:17697: read: connection reset by peer"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.693553 4689 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.693600 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.693819 4689 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.693841 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.777720 4689 apiserver.go:52] "Watching apiserver"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.782252 4689 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.782616 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"]
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.783200 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.783259 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.783287 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.783312 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.783398 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.783616 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.783669 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.783684 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.784232 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.785347 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.785491 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.785630 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.785716 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.785820 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.787375 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.787444 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.787450 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.787654 4689 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.788940 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791443 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791483 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791511 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791533 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791558 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791580 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791614 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791637 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791665 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791686 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791708 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791728 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791749 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791774 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791797 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791817 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791837 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791860 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791878 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791926 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.791951 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.792000 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.792025 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.792299 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.792374 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.792563 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.792636 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.794570 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.794775 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.798862 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795098 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795097 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795445 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795457 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795569 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795730 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795819 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.795908 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.796296 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.796395 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.796530 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.797050 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.797143 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.797579 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.797703 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.797961 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799167 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799250 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799639 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799673 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799696 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799726 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799750 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799772 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799794 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799826 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799848 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799867 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799889 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799911 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799929 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799952 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.799978 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800005 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800032 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800058 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800084 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800106 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800129 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800153 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800173 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800196 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800221 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800244 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800265 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800290 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800314 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800336 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800383 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800406 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800431 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800461 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800480 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800503 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800525 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800546 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800633 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800671 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800712 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.801009 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.801300 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.801390 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.801563 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.800737 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.801847 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.802429 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.802650 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803210 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803219 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803258 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803291 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803315 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803345 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803375 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803383 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803398 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803423 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803449 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803473 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803493 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803516 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803540 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803560 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803598 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803633 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803738 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803814 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803863 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803907 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803942 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.803973 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804004 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804039 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804070 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804107 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804138 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804171 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804214 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804241 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804259 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804294 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804306 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804336 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804403 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804437 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804462 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804491 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804527 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804555 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804640 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804670 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804693 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804718 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804746 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804773 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804785 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804799 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804886 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804925 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804962 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.804991 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805063 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\"
(UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805091 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805122 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805151 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805178 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805208 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805233 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805258 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805284 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805310 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805333 4689 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805359 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805382 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805408 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805443 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805473 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805496 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805532 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805564 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805609 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 
21:11:43.805628 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805654 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805682 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805708 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805735 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805759 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805783 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805812 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805843 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805870 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 13 21:11:43 crc kubenswrapper[4689]: 
I1013 21:11:43.805892 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805918 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805953 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806052 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806079 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806100 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806122 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806147 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806174 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806197 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 13 21:11:43 crc 
kubenswrapper[4689]: I1013 21:11:43.806225 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806249 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806276 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806307 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806335 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806360 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806383 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806415 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806443 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806468 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 21:11:43 crc 
kubenswrapper[4689]: I1013 21:11:43.806496 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806539 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806566 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806607 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806631 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805118 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.805843 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806095 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806305 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.807319 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.807413 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.808648 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.809623 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.809656 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.806653 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.809839 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.809885 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.809920 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.809948 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.809980 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810014 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810054 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810084 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810115 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810144 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810168 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810188 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810211 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810240 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810272 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810306 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810338 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810361 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810386 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810407 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810427 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810505 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810544 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810666 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810714 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810694 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810740 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810870 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.810973 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811052 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811119 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811187 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811263 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811262 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811332 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811374 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811388 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811485 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811852 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.811962 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.812188 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.812222 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.812929 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.813017 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.813419 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.813685 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.813796 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.814176 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.814544 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:44.314480219 +0000 UTC m=+21.232725304 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.816460 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.816520 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.816812 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.817056 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.817152 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.819759 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.819752 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.819815 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.820104 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820118 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.820202 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:44.3201716 +0000 UTC m=+21.238416685 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820227 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820278 4689 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820283 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820520 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820554 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820617 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820715 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820890 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.820911 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821157 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821253 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821295 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821302 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821702 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821711 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821725 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.821937 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.822151 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.822241 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.822839 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.823534 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.823691 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.823618 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.823649 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.824066 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.824176 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.824202 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.824266 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.824655 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.824662 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.824760 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.825008 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.825064 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.825176 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.826064 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.826573 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.831617 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.831742 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.831955 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.832197 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.832509 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.832852 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.833121 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.833254 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.833394 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.833694 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.833851 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.833856 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.834055 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.834748 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.835702 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.835760 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.835814 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.836285 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.836381 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.836770 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.836690 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.836937 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.836983 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.837310 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.837400 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.837477 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.838120 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.838254 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.838409 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.838287 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.838668 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.838841 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.838973 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:11:44.338943841 +0000 UTC m=+21.257188926 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.839533 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.839811 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.839984 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.840181 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.840414 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.840437 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.840457 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.840464 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.840411 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841104 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841137 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841213 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841302 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841500 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841664 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841737 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.841743 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.842053 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.842240 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.842315 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
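
The status_manager failure above is a bootstrap circularity: patching the pod's status must pass the pod.network-node-identity.openshift.io mutating webhook, but the webhook backend at 127.0.0.1:9743 runs on this same node and is not serving yet, so the API server's call is refused and the kubelet simply retries later. A sketch of treating connection-refused as a retryable condition; the endpoint, timings, and retry count are illustrative, and the real refusal happens server-side rather than in the kubelet:

package main

import (
	"errors"
	"fmt"
	"net"
	"syscall"
	"time"
)

// Stand-in for the status patch: dialing the webhook port directly
// reproduces the retryable condition seen in the log. Sketch only.
func patchStatus() error {
	conn, err := net.DialTimeout("tcp", "127.0.0.1:9743", time.Second)
	if err != nil {
		return err
	}
	conn.Close()
	return nil
}

func main() {
	for attempt := 1; attempt <= 3; attempt++ {
		err := patchStatus()
		if err == nil {
			fmt.Println("status patched")
			return
		}
		// Connection refused means the webhook pod is not up yet;
		// back off and retry instead of treating it as permanent.
		if errors.Is(err, syscall.ECONNREFUSED) {
			fmt.Printf("attempt %d: webhook not reachable yet: %v\n", attempt, err)
			time.Sleep(500 * time.Millisecond)
			continue
		}
		fmt.Println("giving up:", err)
		return
	}
}
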
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.842499 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.843161 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.843227 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.843852 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.843938 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.844037 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.844531 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.844706 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.844768 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.849148 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.850633 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.850809 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.851061 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.852626 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.852922 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.853250 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.854517 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855459 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855506 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855532 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855549 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855566 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855579 4689 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855613 4689 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855627 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855643 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855657 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855670 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855684 4689 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855697 4689 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855709 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855720 4689 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855827 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855840 4689 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855853 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855866 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855880 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855894 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855906 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855920 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855933 4689 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855949 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855961 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855974 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855987 4689 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.855999 4689 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856014 4689 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856025 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856038 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856051 4689 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856064 4689 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856078 4689 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856092 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856105 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: 
\"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856120 4689 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856133 4689 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856144 4689 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856156 4689 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856170 4689 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856181 4689 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856192 4689 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856204 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856217 4689 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856230 4689 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.856242 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.858004 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.859203 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.859514 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.862567 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.864164 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.864372 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.864435 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.864457 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.864632 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:44.36457842 +0000 UTC m=+21.282823625 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.865994 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.866037 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.866051 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:43 crc kubenswrapper[4689]: E1013 21:11:43.866293 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:44.36627427 +0000 UTC m=+21.284519355 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.866400 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.867629 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.867741 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.868775 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.870968 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.871367 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.871389 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.871411 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.871424 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.871636 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.871564 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.871809 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.872622 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.873156 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.873868 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.874004 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.874492 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.874199 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.874876 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.875208 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.875445 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.875508 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.876234 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.876455 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.876752 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.876877 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.878155 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.878437 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.879320 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.882008 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.882313 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.885354 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.887012 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.889062 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.889487 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.890429 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.891876 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.894695 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.895537 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.895667 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.897473 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.898000 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.898490 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.899161 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.900074 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.901339 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.901953 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.903195 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.903514 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.903717 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.904012 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.904324 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.905614 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.906158 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.907334 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.907811 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.908895 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.909330 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.910017 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.911622 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.912183 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 13 21:11:43 crc 
kubenswrapper[4689]: I1013 21:11:43.914162 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.914896 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.917785 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.918783 4689 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.919220 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.921997 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.922983 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.924710 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.928091 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.929040 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.930190 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.930993 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.932494 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.933088 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.933921 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.934327 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.935066 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.936198 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.936951 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.938094 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.938931 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.940292 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.941053 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.942170 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.942858 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.943868 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.944029 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.944740 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.945855 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.954867 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956803 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956853 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956920 4689 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956934 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956946 4689 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956957 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956966 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956976 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.956987 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 
crc kubenswrapper[4689]: I1013 21:11:43.956996 4689 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957006 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957014 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957024 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957085 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957095 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957105 4689 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957115 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957124 4689 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957133 4689 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957142 4689 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957153 4689 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957162 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957173 4689 reconciler_common.go:293] 
"Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957182 4689 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957192 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957201 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957212 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957221 4689 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957233 4689 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957242 4689 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957251 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957264 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957274 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957412 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957456 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") 
pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957512 4689 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957523 4689 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957537 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957545 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957557 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957567 4689 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957577 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957610 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957619 4689 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957627 4689 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957638 4689 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957647 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957658 4689 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957667 4689 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957702 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957711 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957719 4689 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957728 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957736 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957745 4689 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957753 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957761 4689 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957769 4689 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957778 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957787 4689 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957796 4689 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957805 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957814 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957822 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957831 4689 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957840 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957849 4689 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957861 4689 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957870 4689 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957878 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957887 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957896 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957904 4689 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957912 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957921 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957929 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957937 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957946 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957956 4689 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957964 4689 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957972 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957980 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.957994 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958004 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958012 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958020 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958029 4689 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958037 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958047 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958056 4689 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958063 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958073 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958081 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958089 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958097 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958105 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958113 4689 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958122 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958130 4689 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958138 4689 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958147 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958155 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958164 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958173 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958182 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958191 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958200 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958208 4689 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958216 4689 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958225 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958235 4689 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958245 4689 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 
21:11:43.958255 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958264 4689 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958273 4689 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958283 4689 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958292 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958303 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958311 4689 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958319 4689 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958327 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958336 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958345 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958354 4689 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958363 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc 
kubenswrapper[4689]: I1013 21:11:43.958371 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958379 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958388 4689 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958397 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958405 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958414 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958424 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958432 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958443 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958452 4689 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958460 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958469 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958477 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958485 4689 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958494 4689 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958502 4689 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958510 4689 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958517 4689 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958525 4689 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958534 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958542 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958550 4689 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958558 4689 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958566 4689 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958574 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.958598 4689 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.977468 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:43 crc kubenswrapper[4689]: I1013 21:11:43.993731 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.005412 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.011555 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.015454 4689 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd" exitCode=255 Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.015574 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd"} Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.017359 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.037728 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.038446 4689 scope.go:117] "RemoveContainer" containerID="d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.040773 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.091702 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.092522 4689 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.105511 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.108093 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.123381 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.137200 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.149876 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.164567 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.167220 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.182352 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cer
t-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: W1013 21:11:44.183910 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-c5b5f0fdf3124f4f2c3a5a70a9f93bf702dd7aab9da027e8ed9f175ca278d61f WatchSource:0}: Error finding container c5b5f0fdf3124f4f2c3a5a70a9f93bf702dd7aab9da027e8ed9f175ca278d61f: Status 404 returned error can't find the container with id c5b5f0fdf3124f4f2c3a5a70a9f93bf702dd7aab9da027e8ed9f175ca278d61f Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.192338 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.199412 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.213486 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.363052 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.363163 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:11:45.363144472 +0000 UTC m=+22.281389557 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.363328 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.363355 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.363439 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.363485 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-13 21:11:45.36347867 +0000 UTC m=+22.281723755 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.363652 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.363772 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:45.363739726 +0000 UTC m=+22.281984891 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.464311 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.464373 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464540 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464563 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464577 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464604 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464657 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464664 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:45.464648326 +0000 UTC m=+22.382893411 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464675 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.464785 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:45.464757018 +0000 UTC m=+22.383002103 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:44 crc kubenswrapper[4689]: I1013 21:11:44.866464 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:44 crc kubenswrapper[4689]: E1013 21:11:44.866622 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.012574 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-vxqkf"] Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.012947 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.015732 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.015967 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.016398 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.020165 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501"} Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.020225 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0"} Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.020178 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.020255 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"616591950d1d27f60c2df78ca58cf57d4f12deb8dc8491110fb1e8f93aeb4218"} Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.024629 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.026972 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530"} Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.029424 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.032390 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.038190 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb"} Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.038250 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"191b3606d8ce480c4e9e7963b3fa70c7e28da700a5bc20e0f177274c2cbd086c"} Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.044436 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c5b5f0fdf3124f4f2c3a5a70a9f93bf702dd7aab9da027e8ed9f175ca278d61f"} Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.073124 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers 
with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.099730 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13
T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.117576 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.137196 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.158573 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.169516 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-host\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.169705 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-serviceca\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.169823 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6vt7\" (UniqueName: \"kubernetes.io/projected/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-kube-api-access-d6vt7\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.219033 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.253193 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.270600 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-host\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.270648 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-serviceca\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.270688 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6vt7\" (UniqueName: \"kubernetes.io/projected/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-kube-api-access-d6vt7\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.270771 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-host\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.272832 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-serviceca\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.280006 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.286629 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6vt7\" (UniqueName: \"kubernetes.io/projected/d172f3cf-bf0e-4051-8128-9fbaae5e2e70-kube-api-access-d6vt7\") pod \"node-ca-vxqkf\" (UID: \"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\") " pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.305149 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.326442 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vxqkf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.333843 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: W1013 21:11:45.340802 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd172f3cf_bf0e_4051_8128_9fbaae5e2e70.slice/crio-e2d1ebb49baeab24fdf66645a88ab62236223d31e13c2eebe419f2a6b584760d WatchSource:0}: Error finding container e2d1ebb49baeab24fdf66645a88ab62236223d31e13c2eebe419f2a6b584760d: Status 404 returned error can't find the container with id e2d1ebb49baeab24fdf66645a88ab62236223d31e13c2eebe419f2a6b584760d Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.362867 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.371547 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.371649 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:45 crc 
kubenswrapper[4689]: I1013 21:11:45.371688 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.371727 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:11:47.371703368 +0000 UTC m=+24.289948453 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.371811 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.371912 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.371944 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:47.371920723 +0000 UTC m=+24.290166038 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.372015 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:47.371991865 +0000 UTC m=+24.290236950 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.396620 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.429792 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.444142 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.458372 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.472654 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.472728 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.472887 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.472911 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.472908 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.472966 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.472982 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.473054 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:47.473029127 +0000 UTC m=+24.391274422 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.472926 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.473108 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:47.473101309 +0000 UTC m=+24.391346384 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.813866 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-xr7rr"] Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.815047 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-w5fqm"] Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.815647 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-4zr4r"] Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.815672 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.815873 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.817671 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.817794 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-vvvxg"] Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.818329 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-vvvxg" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.820847 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.822175 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.822374 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.822562 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.822720 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.822809 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.823754 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.825641 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.826137 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.826303 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.826703 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.833669 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.833969 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.834063 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.834116 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.836807 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.854003 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.866611 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.866645 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.866743 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:45 crc kubenswrapper[4689]: E1013 21:11:45.866934 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.869848 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.885270 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.901609 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.918564 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.933191 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.955201 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977054 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-socket-dir-parent\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977117 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cnibin\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977147 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-k8s-cni-cncf-io\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977175 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1863da92-265f-451e-a741-a184c8d3f781-proxy-tls\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977281 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977366 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/12667451-309d-4f81-9d93-ffd3c3299a41-hosts-file\") pod \"node-resolver-vvvxg\" (UID: \"12667451-309d-4f81-9d93-ffd3c3299a41\") " 
pod="openshift-dns/node-resolver-vvvxg" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977391 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-cni-binary-copy\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977410 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-cni-bin\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977426 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-os-release\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977445 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-os-release\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977466 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-netns\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977535 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-hostroot\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977561 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-etc-kubernetes\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977600 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1863da92-265f-451e-a741-a184c8d3f781-rootfs\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977626 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " 
pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977664 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-multus-certs\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977688 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cni-binary-copy\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977736 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-conf-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977764 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-cni-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977789 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-cnibin\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977811 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv27l\" (UniqueName: \"kubernetes.io/projected/15ff2f20-3071-4cf3-80b2-37e3e36d731b-kube-api-access-mv27l\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977916 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mztkn\" (UniqueName: \"kubernetes.io/projected/12667451-309d-4f81-9d93-ffd3c3299a41-kube-api-access-mztkn\") pod \"node-resolver-vvvxg\" (UID: \"12667451-309d-4f81-9d93-ffd3c3299a41\") " pod="openshift-dns/node-resolver-vvvxg" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.977983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-system-cni-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.978007 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-cni-multus\") pod 
\"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.978034 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m75c9\" (UniqueName: \"kubernetes.io/projected/1863da92-265f-451e-a741-a184c8d3f781-kube-api-access-m75c9\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.978135 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-daemon-config\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.978212 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr52b\" (UniqueName: \"kubernetes.io/projected/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-kube-api-access-zr52b\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.978243 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-kubelet\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.978277 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1863da92-265f-451e-a741-a184c8d3f781-mcd-auth-proxy-config\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.978353 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-system-cni-dir\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:45 crc kubenswrapper[4689]: I1013 21:11:45.991674 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:45Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.013873 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.039346 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.048730 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vxqkf" event={"ID":"d172f3cf-bf0e-4051-8128-9fbaae5e2e70","Type":"ContainerStarted","Data":"e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b"} Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.048811 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vxqkf" event={"ID":"d172f3cf-bf0e-4051-8128-9fbaae5e2e70","Type":"ContainerStarted","Data":"e2d1ebb49baeab24fdf66645a88ab62236223d31e13c2eebe419f2a6b584760d"} Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.055870 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.066190 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.078516 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.078981 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-system-cni-dir\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079041 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-socket-dir-parent\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079075 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cnibin\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079112 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-system-cni-dir\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079112 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-k8s-cni-cncf-io\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079186 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1863da92-265f-451e-a741-a184c8d3f781-proxy-tls\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " 
pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079220 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cnibin\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079261 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-k8s-cni-cncf-io\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079225 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079219 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-socket-dir-parent\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.079399 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-cni-binary-copy\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080432 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-cni-bin\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080464 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-os-release\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080242 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080492 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/12667451-309d-4f81-9d93-ffd3c3299a41-hosts-file\") pod \"node-resolver-vvvxg\" (UID: \"12667451-309d-4f81-9d93-ffd3c3299a41\") " pod="openshift-dns/node-resolver-vvvxg" 
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080520 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-os-release\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080553 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-netns\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080577 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-hostroot\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080605 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/12667451-309d-4f81-9d93-ffd3c3299a41-hosts-file\") pod \"node-resolver-vvvxg\" (UID: \"12667451-309d-4f81-9d93-ffd3c3299a41\") " pod="openshift-dns/node-resolver-vvvxg"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080607 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-cni-bin\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080648 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-netns\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080657 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-etc-kubernetes\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080624 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-etc-kubernetes\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080716 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1863da92-265f-451e-a741-a184c8d3f781-rootfs\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080689 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-hostroot\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080745 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080786 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1863da92-265f-451e-a741-a184c8d3f781-rootfs\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080791 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-multus-certs\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080815 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-os-release\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080823 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cni-binary-copy\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080864 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-run-multus-certs\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080922 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-conf-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080948 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-cni-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080966 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-cnibin\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080986 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv27l\" (UniqueName: \"kubernetes.io/projected/15ff2f20-3071-4cf3-80b2-37e3e36d731b-kube-api-access-mv27l\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081002 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-conf-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081015 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-cni-multus\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080960 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-os-release\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081035 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-cnibin\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081044 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mztkn\" (UniqueName: \"kubernetes.io/projected/12667451-309d-4f81-9d93-ffd3c3299a41-kube-api-access-mztkn\") pod \"node-resolver-vvvxg\" (UID: \"12667451-309d-4f81-9d93-ffd3c3299a41\") " pod="openshift-dns/node-resolver-vvvxg"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081104 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-cni-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081129 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-cni-multus\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081161 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-system-cni-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081229 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m75c9\" (UniqueName: \"kubernetes.io/projected/1863da92-265f-451e-a741-a184c8d3f781-kube-api-access-m75c9\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081247 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-system-cni-dir\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081266 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-daemon-config\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081283 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr52b\" (UniqueName: \"kubernetes.io/projected/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-kube-api-access-zr52b\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081300 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-kubelet\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081321 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1863da92-265f-451e-a741-a184c8d3f781-mcd-auth-proxy-config\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081445 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-host-var-lib-kubelet\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.080374 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-cni-binary-copy\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081517 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/15ff2f20-3071-4cf3-80b2-37e3e36d731b-cni-binary-copy\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.081696 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/15ff2f20-3071-4cf3-80b2-37e3e36d731b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.082012 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-multus-daemon-config\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.082193 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1863da92-265f-451e-a741-a184c8d3f781-mcd-auth-proxy-config\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.085027 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1863da92-265f-451e-a741-a184c8d3f781-proxy-tls\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.097958 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m75c9\" (UniqueName: \"kubernetes.io/projected/1863da92-265f-451e-a741-a184c8d3f781-kube-api-access-m75c9\") pod \"machine-config-daemon-w5fqm\" (UID: \"1863da92-265f-451e-a741-a184c8d3f781\") " pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.099502 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.101223 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv27l\" (UniqueName: \"kubernetes.io/projected/15ff2f20-3071-4cf3-80b2-37e3e36d731b-kube-api-access-mv27l\") pod \"multus-additional-cni-plugins-4zr4r\" (UID: \"15ff2f20-3071-4cf3-80b2-37e3e36d731b\") " pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.107659 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mztkn\" (UniqueName: \"kubernetes.io/projected/12667451-309d-4f81-9d93-ffd3c3299a41-kube-api-access-mztkn\") pod \"node-resolver-vvvxg\" (UID: \"12667451-309d-4f81-9d93-ffd3c3299a41\") " pod="openshift-dns/node-resolver-vvvxg"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.111770 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr52b\" (UniqueName: \"kubernetes.io/projected/632b68ca-d2a4-4570-a0a2-8ea8d204fb59-kube-api-access-zr52b\") pod \"multus-xr7rr\" (UID: \"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\") " pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.121421 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.135761 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.136355 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-xr7rr"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.144766 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.146785 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.153220 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-4zr4r"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.158940 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-vvvxg"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.161602 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: W1013 21:11:46.164202 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-6ed6f38c0db417a1e8db2f72c28e99f894bb98735f24440c3c10907aec339dfa WatchSource:0}: Error finding container 6ed6f38c0db417a1e8db2f72c28e99f894bb98735f24440c3c10907aec339dfa: Status 404 returned error can't find the container with id 6ed6f38c0db417a1e8db2f72c28e99f894bb98735f24440c3c10907aec339dfa
Oct 13 21:11:46 crc kubenswrapper[4689]: W1013 21:11:46.183907 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15ff2f20_3071_4cf3_80b2_37e3e36d731b.slice/crio-aceb2ac106730c4499cea64b79d44698a23e07b5bb1fffa8764290d3c725fa4e WatchSource:0}: Error finding container aceb2ac106730c4499cea64b79d44698a23e07b5bb1fffa8764290d3c725fa4e: Status 404 returned error can't find the container with id aceb2ac106730c4499cea64b79d44698a23e07b5bb1fffa8764290d3c725fa4e
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.184144 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.224562 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xml6c"]
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.225150 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.225492 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.228300 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.229122 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.229248 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.229295 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.229377 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.229611 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.229640 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.243764 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.259763 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.273876 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.300767 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.316031 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.333963 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.348944 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.368208 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387206 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-var-lib-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387267 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-ovn\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387289 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qn8m\" (UniqueName: \"kubernetes.io/projected/5736acbe-9793-447e-9e22-76b0f407bfb7-kube-api-access-9qn8m\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387328 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-log-socket\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387370 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-netns\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387412 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-kubelet\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387429 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-slash\") pod \"ovnkube-node-xml6c\" (UID: 
\"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387448 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-systemd\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387472 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-ovn-kubernetes\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387495 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387517 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5736acbe-9793-447e-9e22-76b0f407bfb7-ovn-node-metrics-cert\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387544 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-bin\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387568 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-config\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387621 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-env-overrides\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387645 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-script-lib\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387678 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387707 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-netd\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387884 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-systemd-units\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387935 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-etc-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.387990 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-node-log\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.389878 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.412095 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.426424 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.442654 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.461802 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488742 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-bin\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488801 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-config\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488819 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488838 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-env-overrides\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488854 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-script-lib\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488870 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-netd\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488887 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-systemd-units\") pod \"ovnkube-node-xml6c\" 
(UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488907 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-etc-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488905 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-bin\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488924 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-node-log\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488983 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-node-log\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.488997 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489035 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-var-lib-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489047 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-systemd-units\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489062 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-ovn\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489087 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-var-lib-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 
21:11:46.489026 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-netd\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489091 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-ovn\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489128 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-etc-openvswitch\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489128 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qn8m\" (UniqueName: \"kubernetes.io/projected/5736acbe-9793-447e-9e22-76b0f407bfb7-kube-api-access-9qn8m\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489334 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-log-socket\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489406 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-netns\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489449 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-kubelet\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489478 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-slash\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489502 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-systemd\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489526 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-ovn-kubernetes\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489530 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-netns\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489559 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489606 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-slash\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489614 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5736acbe-9793-447e-9e22-76b0f407bfb7-ovn-node-metrics-cert\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489612 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-env-overrides\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489640 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-kubelet\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489566 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-log-socket\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489693 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-ovn-kubernetes\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489702 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489811 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-systemd\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489823 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-config\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.489906 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-script-lib\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.495130 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5736acbe-9793-447e-9e22-76b0f407bfb7-ovn-node-metrics-cert\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.507735 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qn8m\" (UniqueName: \"kubernetes.io/projected/5736acbe-9793-447e-9e22-76b0f407bfb7-kube-api-access-9qn8m\") pod \"ovnkube-node-xml6c\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.547922 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:46 crc kubenswrapper[4689]: W1013 21:11:46.564607 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5736acbe_9793_447e_9e22_76b0f407bfb7.slice/crio-e36d3a3bd648cf9b359177af5b7bb1867f9b32b01bb0ec44207846f1d70c27be WatchSource:0}: Error finding container e36d3a3bd648cf9b359177af5b7bb1867f9b32b01bb0ec44207846f1d70c27be: Status 404 returned error can't find the container with id e36d3a3bd648cf9b359177af5b7bb1867f9b32b01bb0ec44207846f1d70c27be Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.594766 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.606845 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.612873 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.620528 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.630284 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.654281 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.668567 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.685704 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.703570 4689 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.718029 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.732866 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.746481 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.760919 4689 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.773318 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.788821 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.799478 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.814626 4689 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\
":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.826190 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.841182 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.852796 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.866040 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.866505 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:46 crc kubenswrapper[4689]: E1013 21:11:46.866684 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.886193 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.905755 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.919365 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.931605 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.943289 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.956115 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.972495 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 
2025-08-24T17:21:41Z" Oct 13 21:11:46 crc kubenswrapper[4689]: I1013 21:11:46.987198 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.001547 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:46Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.054426 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerStarted","Data":"d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.054501 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerStarted","Data":"84ea5710c8bffce302af81bd28e99c44203bd6fe97e28797a46e3ad5f9ef014d"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.056234 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vvvxg" event={"ID":"12667451-309d-4f81-9d93-ffd3c3299a41","Type":"ContainerStarted","Data":"c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.056274 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vvvxg" event={"ID":"12667451-309d-4f81-9d93-ffd3c3299a41","Type":"ContainerStarted","Data":"003e615bd84c69a7a23db5607d134818ba3aa6d69e42d3053359b4a91d8b00a3"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.058484 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" 
event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.058648 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.058687 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"6ed6f38c0db417a1e8db2f72c28e99f894bb98735f24440c3c10907aec339dfa"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.059957 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.061597 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1" exitCode=0 Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.061687 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.061717 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"e36d3a3bd648cf9b359177af5b7bb1867f9b32b01bb0ec44207846f1d70c27be"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.063620 4689 generic.go:334] "Generic (PLEG): container finished" podID="15ff2f20-3071-4cf3-80b2-37e3e36d731b" containerID="43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488" exitCode=0 Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.063904 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerDied","Data":"43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.063990 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerStarted","Data":"aceb2ac106730c4499cea64b79d44698a23e07b5bb1fffa8764290d3c725fa4e"} Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.081132 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.101868 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.118798 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.136932 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.148942 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.162137 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.180796 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.195977 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.230730 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.267841 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.316094 4689 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:
11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.355604 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.386754 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.405547 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.405794 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:11:51.405751688 +0000 UTC m=+28.323996773 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.406241 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.406317 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.406410 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.406483 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.406508 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:51.406485975 +0000 UTC m=+28.324731060 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.406540 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:51.406521656 +0000 UTC m=+28.324766741 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.431396 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.470306 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.507109 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.507341 4689 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.507383 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507511 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507534 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507547 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507606 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:51.507574459 +0000 UTC m=+28.425819534 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507512 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507633 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507647 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.507691 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-10-13 21:11:51.507680021 +0000 UTC m=+28.425925126 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.557346 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc 
kubenswrapper[4689]: I1013 21:11:47.617906 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.642046 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.667698 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.708847 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.752066 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z 
is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.795347 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.828166 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.867097 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.867115 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.867257 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:47 crc kubenswrapper[4689]: E1013 21:11:47.867346 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.867677 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.910026 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.955079 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:47 crc kubenswrapper[4689]: I1013 21:11:47.987912 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:47Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.071728 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.071800 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.071824 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.071846 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.074834 4689 generic.go:334] "Generic (PLEG): container finished" 
podID="15ff2f20-3071-4cf3-80b2-37e3e36d731b" containerID="23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4" exitCode=0 Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.074907 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerDied","Data":"23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4"} Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.107658 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b267
02f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.123317 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.139930 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",
\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.164483 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z 
is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.188215 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.228313 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.266407 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.305335 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.348658 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.389243 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.431022 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.466748 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.508048 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.546493 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:48Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:48 crc kubenswrapper[4689]: I1013 21:11:48.867186 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:48 crc kubenswrapper[4689]: E1013 21:11:48.867350 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.080960 4689 generic.go:334] "Generic (PLEG): container finished" podID="15ff2f20-3071-4cf3-80b2-37e3e36d731b" containerID="7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf" exitCode=0 Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.081047 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerDied","Data":"7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf"} Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.085952 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.086017 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.110019 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.127305 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.167159 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z 
is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.224322 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.246050 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.274404 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.288732 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.299162 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.311533 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.330535 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.344952 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.358472 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\
\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.369667 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.382961 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:49Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.867080 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.867236 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:49 crc kubenswrapper[4689]: E1013 21:11:49.867280 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:49 crc kubenswrapper[4689]: E1013 21:11:49.867461 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.971639 4689 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.974100 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.974141 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.974156 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.974306 4689 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.985024 4689 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.985388 4689 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.986919 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.986975 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.986986 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.987005 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:49 crc kubenswrapper[4689]: I1013 21:11:49.987018 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:49Z","lastTransitionTime":"2025-10-13T21:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: E1013 21:11:50.010509 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.015634 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.015811 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.015903 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.016039 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.016146 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: E1013 21:11:50.033393 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.038945 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.038996 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.039014 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.039041 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.039060 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: E1013 21:11:50.056484 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.062444 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.062508 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.062523 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.062548 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.062564 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: E1013 21:11:50.083456 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.088408 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.088460 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.088475 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.088503 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.088519 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.094245 4689 generic.go:334] "Generic (PLEG): container finished" podID="15ff2f20-3071-4cf3-80b2-37e3e36d731b" containerID="8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef" exitCode=0 Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.094328 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerDied","Data":"8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef"} Oct 13 21:11:50 crc kubenswrapper[4689]: E1013 21:11:50.109477 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: E1013 21:11:50.109655 4689 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.112211 4689 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.112309 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.112374 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.112400 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.112436 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.112466 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.135091 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.149414 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.163118 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.181685 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.197196 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.211444 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.216331 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.216372 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.216383 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.216428 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.216441 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.234232 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 
dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.250874 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.268742 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91ab
aa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.287633 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\
":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.307987 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b900
92272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.319993 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.320036 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.320047 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.320064 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.320077 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.324043 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.342080 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.422714 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.422768 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.422786 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.422806 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.422822 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.526189 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.526232 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.526246 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.526263 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.526276 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.629079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.629129 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.629145 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.629167 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.629181 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.641263 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.648692 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.651263 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"]
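Note: the "Failed to update status for pod" records throughout this capture share one root cause. Each pod status PATCH is intercepted by the pod.network-node-identity.openshift.io admission webhook served at https://127.0.0.1:9743, and the API server cannot complete the TLS handshake because the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-10-13T21:11:50Z. The sketch below reproduces just the validity-window check behind Go's "x509: certificate has expired or is not yet valid" error; the PEM file path is hypothetical, and a real verifier (x509.Certificate.Verify) also checks the chain, names, and key usages:

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        // Hypothetical path to the webhook's serving certificate.
        pemBytes, err := os.ReadFile("webhook-cert.pem")
        if err != nil {
            panic(err)
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            panic("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            panic(err)
        }
        now := time.Now().UTC()
        if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            // The failure mode in the log: current time 2025-10-13T21:11:50Z
            // is after NotAfter 2025-08-24T17:21:41Z.
            fmt.Printf("certificate has expired or is not yet valid: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
            return
        }
        fmt.Println("certificate is within its validity window")
    }

Because the webhook failurePolicy evidently rejects on error, every status patch returns "Internal error occurred" until the certificate is rotated, so the stale pod statuses pile up in the records that follow.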
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.681498 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.702389 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.720913 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.733299 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.733353 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.733367 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.733391 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.733407 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.738116 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.761331 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.789886 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.806569 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.826300 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91ab
aa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.836218 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.836267 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.836287 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.836314 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.836335 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.842700 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.867173 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:50 crc kubenswrapper[4689]: E1013 21:11:50.867500 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.871113 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca
47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.890187 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.911688 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.938906 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.938994 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.939022 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.939062 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.939091 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:50Z","lastTransitionTime":"2025-10-13T21:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.948059 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880
aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:50 crc kubenswrapper[4689]: I1013 21:11:50.973914 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/n
et.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.003141 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:50Z 
is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.040703 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.043304 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.043356 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.043367 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.043384 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.043396 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.059273 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.076672 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.091389 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.102152 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.105131 4689 generic.go:334] "Generic (PLEG): container finished" podID="15ff2f20-3071-4cf3-80b2-37e3e36d731b" containerID="0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e" exitCode=0 Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.105199 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerDied","Data":"0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.107798 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.131725 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.148805 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.149262 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.149364 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.149458 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.149549 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.155939 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.180261 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.203498 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.218217 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.234222 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.249922 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.257671 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.257817 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.257836 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.257858 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.257874 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.269935 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.286324 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.301488 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.315676 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.332434 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.348050 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.360819 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.360887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.360904 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.360940 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.360959 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.367731 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.392191 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f6
9ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.406042 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.422984 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.438563 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.450870 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.451107 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:11:59.451068296 +0000 UTC m=+36.369313381 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.451188 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.451268 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.451378 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.451466 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:59.451442274 +0000 UTC m=+36.369687399 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.451481 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.451527 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:59.451519126 +0000 UTC m=+36.369764211 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.452069 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.463857 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.463894 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.463906 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.463924 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.463941 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.471104 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.491446 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.521319 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.537805 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.552389 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.552455 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.552661 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.552686 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.552704 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.552718 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.552728 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.552738 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 
21:11:51.552812 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:59.552783034 +0000 UTC m=+36.471028129 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.552847 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:11:59.552824835 +0000 UTC m=+36.471069930 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.566746 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.566786 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.566795 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.566811 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.566823 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.669930 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.669997 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.670016 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.670044 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.670067 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.774396 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.774463 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.774487 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.774515 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.774536 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.867989 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.868160 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.868908 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:51 crc kubenswrapper[4689]: E1013 21:11:51.868994 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.877513 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.878088 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.878115 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.878147 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.878176 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.982551 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.982623 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.982633 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.982652 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:51 crc kubenswrapper[4689]: I1013 21:11:51.982664 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:51Z","lastTransitionTime":"2025-10-13T21:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.086066 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.086441 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.086581 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.086790 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.086969 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.113474 4689 generic.go:334] "Generic (PLEG): container finished" podID="15ff2f20-3071-4cf3-80b2-37e3e36d731b" containerID="7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07" exitCode=0 Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.113660 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerDied","Data":"7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.136838 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.161621 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.184733 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.195892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.195965 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.195994 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.196030 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.196058 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.207262 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.235139 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-1
0-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.250127 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.277214 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.296265 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.299560 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.299645 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.299667 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.299694 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.299715 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.311911 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.342142 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z 
is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.359052 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.374562 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.387664 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.400498 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.402842 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.402927 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.402943 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.402974 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.402994 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.414404 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:52Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.505625 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.505664 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.505674 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.505693 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.505707 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.608290 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.608340 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.608350 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.608389 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.608401 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.711626 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.711687 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.711708 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.711739 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.711762 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.815193 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.815249 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.815263 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.815287 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.815300 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.866560 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:52 crc kubenswrapper[4689]: E1013 21:11:52.866753 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.918212 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.918241 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.918251 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.918267 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:52 crc kubenswrapper[4689]: I1013 21:11:52.918281 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:52Z","lastTransitionTime":"2025-10-13T21:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.021238 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.021289 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.021300 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.021320 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.021331 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.123069 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.123119 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.123129 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.123151 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.123166 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.123949 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.124303 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.132859 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" event={"ID":"15ff2f20-3071-4cf3-80b2-37e3e36d731b","Type":"ContainerStarted","Data":"83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.141113 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.154950 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.222484 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.226214 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.233052 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.233115 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.233128 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.233148 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.233161 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.260354 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2
cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.276089 4689 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.294504 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.308305 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.320793 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.336472 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.336500 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.336512 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.336532 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.336545 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.338712 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.352280 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.363350 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.375148 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.387324 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.399238 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.411457 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.428093 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.439276 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.439314 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.439325 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.439342 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.439355 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.442441 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.454615 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.467446 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.478166 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.500529 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.512810 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.523289 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.540194 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd92
01ce003f7bd888812ea91bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.541917 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.541946 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.541955 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.541975 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.541987 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.552727 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.564983 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.582848 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.595059 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.605049 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.617071 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.645487 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.645543 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.645558 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.645621 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.645640 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.749655 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.749755 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.749775 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.749836 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.749857 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.852733 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.852788 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.852807 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.852836 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.852857 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.866875 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.867293 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:53 crc kubenswrapper[4689]: E1013 21:11:53.872005 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:53 crc kubenswrapper[4689]: E1013 21:11:53.872202 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.888180 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.901025 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.915485 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.934997 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.956211 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.956258 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.956300 4689 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.956320 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.956338 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:53Z","lastTransitionTime":"2025-10-13T21:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.959781 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.974891 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:53 crc kubenswrapper[4689]: I1013 21:11:53.992605 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.003798 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.016421 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.030508 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.045197 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8
d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.059421 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.059485 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.059505 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.059534 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.059557 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.061066 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.081067 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lo
g/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.101468 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\"
:0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.115892 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.137323 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.138186 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.163350 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.163421 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.163429 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.163444 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.163471 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.171296 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.183216 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.201420 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.212964 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.226575 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.239395 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.250746 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.263008 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.268121 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.268174 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.268190 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.268209 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.268224 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.286307 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.311346 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.326832 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.342907 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.370137 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.371736 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 
crc kubenswrapper[4689]: I1013 21:11:54.371783 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.371795 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.371813 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.371824 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.406234 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.454074 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.474611 
4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.474666 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.474680 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.474705 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.474719 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.490071 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.577239 4689 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.577330 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.577350 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.577381 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.577438 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.680939 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.681016 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.681037 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.681076 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.681101 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.784324 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.784385 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.784399 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.784435 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.784451 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.866611 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:54 crc kubenswrapper[4689]: E1013 21:11:54.867035 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.887470 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.887529 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.887543 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.887569 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.887606 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.990458 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.990512 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.990525 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.990544 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:54 crc kubenswrapper[4689]: I1013 21:11:54.990557 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:54Z","lastTransitionTime":"2025-10-13T21:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.093274 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.093320 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.093330 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.093347 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.093365 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.140637 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.196753 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.196823 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.196844 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.196873 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.196892 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.300404 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.300458 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.300476 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.300503 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.300522 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.403808 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.403848 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.403858 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.403874 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.403886 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.507305 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.507362 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.507379 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.507406 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.507428 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.611148 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.611547 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.611789 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.611975 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.612112 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.715707 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.715794 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.715857 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.715894 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.715914 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.819444 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.819510 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.819526 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.819552 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.819576 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.867128 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:55 crc kubenswrapper[4689]: E1013 21:11:55.867350 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.867548 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:55 crc kubenswrapper[4689]: E1013 21:11:55.867740 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.922445 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.922499 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.922514 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.922537 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:55 crc kubenswrapper[4689]: I1013 21:11:55.922551 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:55Z","lastTransitionTime":"2025-10-13T21:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.024428 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.024469 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.024479 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.024495 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.024505 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.127404 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.127464 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.127480 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.127503 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.127520 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.146658 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/0.log" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.149843 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde" exitCode=1 Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.149912 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.151506 4689 scope.go:117] "RemoveContainer" containerID="19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.174988 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-c
ert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.197727 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.219671 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.230862 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.230908 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.230923 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.230941 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.230955 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.239914 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.256639 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.278815 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.305768 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.326527 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.333746 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.333810 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.333830 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.333861 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.333883 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.348941 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.367495 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 
2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.382642 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.410531 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.429090 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.437312 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.437392 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.437408 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.437425 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.437437 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.447889 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.468803 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:55Z\\\",\\\"message\\\":\\\"or removal\\\\nI1013 21:11:55.519204 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:55.519228 5981 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:55.519235 5981 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:55.519310 5981 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:11:55.519329 5981 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:11:55.519305 5981 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:55.519364 5981 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:55.519389 5981 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:11:55.519398 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:11:55.519437 5981 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:55.519469 5981 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 
21:11:55.519531 5981 factory.go:656] Stopping watch factory\\\\nI1013 21:11:55.519527 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:55.519561 5981 ovnkube.go:599] Stopped ovnkube\\\\nI1013 2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCont
ainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:56Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.540793 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.540855 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.540871 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.540894 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.540905 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.644275 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.644686 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.644805 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.644920 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.645017 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.748557 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.748630 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.748643 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.748664 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.748679 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.851376 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.851784 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.851863 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.851940 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.852005 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.866702 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:56 crc kubenswrapper[4689]: E1013 21:11:56.866862 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.954401 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.954457 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.954471 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.954496 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:56 crc kubenswrapper[4689]: I1013 21:11:56.954515 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:56Z","lastTransitionTime":"2025-10-13T21:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.057537 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.057637 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.057655 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.057688 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.057711 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.163486 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/0.log"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.164659 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.164736 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.164763 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.164796 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.164817 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.170125 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10"}
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.170434 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.212944 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2
f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:55Z\\\",\\\"message\\\":\\\"or removal\\\\nI1013 21:11:55.519204 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:55.519228 5981 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:55.519235 5981 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:55.519310 5981 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:11:55.519329 5981 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:11:55.519305 5981 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:55.519364 5981 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:55.519389 5981 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:11:55.519398 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:11:55.519437 5981 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:55.519469 5981 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:55.519531 5981 factory.go:656] Stopping watch factory\\\\nI1013 21:11:55.519527 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:55.519561 5981 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.264791 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.267019 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.267048 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.267057 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.267072 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.267081 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.281672 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.300229 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.313615 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.329133 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.347689 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.362995 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.370564 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.370638 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.370648 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.370668 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.370681 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.383276 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z"
Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.402319 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.417280 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.438993 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.461442 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.474079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.474287 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.474330 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.474372 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.474400 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.479317 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.502855 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:57Z is after 
2025-08-24T17:21:41Z" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.577777 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.577948 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.577970 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.578133 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.578205 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.682156 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.682223 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.682240 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.682265 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.682284 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.787830 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.787892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.787905 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.787929 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.787945 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.866832 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.866906 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:57 crc kubenswrapper[4689]: E1013 21:11:57.867065 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:57 crc kubenswrapper[4689]: E1013 21:11:57.867330 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.899872 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.899923 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.899933 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.899951 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.899962 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:57Z","lastTransitionTime":"2025-10-13T21:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.997170 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt"] Oct 13 21:11:57 crc kubenswrapper[4689]: I1013 21:11:57.998375 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.003678 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.003788 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.003849 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.003934 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.003992 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.005050 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.005224 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.016338 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.030029 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.050439 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.074462 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.103302 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.107857 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.107917 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.107936 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.107965 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.107987 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.122848 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.131897 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.131964 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgwvz\" (UniqueName: \"kubernetes.io/projected/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-kube-api-access-xgwvz\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.132125 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.132200 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.141700 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.164174 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.174908 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/1.log" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.176254 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/0.log" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.180949 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10" exitCode=1 Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.181014 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.181103 4689 scope.go:117] "RemoveContainer" containerID="19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.181514 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.182295 4689 scope.go:117] "RemoveContainer" containerID="20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10" Oct 13 21:11:58 crc kubenswrapper[4689]: E1013 21:11:58.182624 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.202439 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.210166 4689 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.210221 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.210234 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.210253 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.210265 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.222430 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.233959 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.234030 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.234078 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgwvz\" (UniqueName: \"kubernetes.io/projected/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-kube-api-access-xgwvz\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.234123 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.234906 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.235313 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.244395 4689 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.248673 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/e
tc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:55Z\\\",\\\"message\\\":\\\"or removal\\\\nI1013 21:11:55.519204 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:55.519228 5981 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:55.519235 5981 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:55.519310 5981 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:11:55.519329 5981 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:11:55.519305 5981 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:55.519364 5981 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:55.519389 5981 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:11:55.519398 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:11:55.519437 5981 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:55.519469 5981 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:55.519531 5981 factory.go:656] Stopping watch factory\\\\nI1013 21:11:55.519527 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:55.519561 5981 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.258000 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgwvz\" (UniqueName: \"kubernetes.io/projected/55d53b5e-3ce0-4f24-9f15-770b7c23e8e2-kube-api-access-xgwvz\") pod \"ovnkube-control-plane-749d76644c-v6xvt\" (UID: \"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.265651 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.296292 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.312690 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.312767 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.313737 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.313800 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.313818 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.313847 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.313866 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: W1013 21:11:58.327416 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55d53b5e_3ce0_4f24_9f15_770b7c23e8e2.slice/crio-187e9d643e50ab114be8c64cf398aafa42fff3c0e7b37c17cc0a143382ae28ce WatchSource:0}: Error finding container 187e9d643e50ab114be8c64cf398aafa42fff3c0e7b37c17cc0a143382ae28ce: Status 404 returned error can't find the container with id 187e9d643e50ab114be8c64cf398aafa42fff3c0e7b37c17cc0a143382ae28ce Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.327816 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.345157 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},
{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.366275 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.386246 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.405707 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.426028 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 
crc kubenswrapper[4689]: I1013 21:11:58.426076 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.426086 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.426103 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.426112 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.429704 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.466269 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.496132 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.509695 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.524757 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8
d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.529638 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.529698 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.529719 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.529745 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.529765 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.542920 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.554964 4689 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.580165 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.592937 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.609552 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.633253 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.633303 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.633319 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.633345 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.633364 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.635774 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19542211ea16f84b402e5f3bb88db7bcf6e9dd9201ce003f7bd888812ea91bde\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:55Z\\\",\\\"message\\\":\\\"or removal\\\\nI1013 21:11:55.519204 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:55.519228 5981 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:55.519235 5981 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:55.519310 5981 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:11:55.519329 5981 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:11:55.519305 5981 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:55.519364 5981 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:55.519389 5981 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:11:55.519398 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:55.519407 5981 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:11:55.519437 5981 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:55.519469 5981 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:55.519531 5981 factory.go:656] Stopping watch factory\\\\nI1013 21:11:55.519527 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:55.519561 5981 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.652798 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:58Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.762270 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.764300 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.764361 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.764401 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.764428 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.764447 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.866392 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:58 crc kubenswrapper[4689]: E1013 21:11:58.866574 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.867826 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.867880 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.867895 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.867919 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.867933 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.971212 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.971286 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.971304 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.971332 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:58 crc kubenswrapper[4689]: I1013 21:11:58.971350 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:58Z","lastTransitionTime":"2025-10-13T21:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.074784 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.074847 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.074866 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.074891 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.074911 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.178540 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.178659 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.178679 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.178709 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.178729 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.196220 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/1.log" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.200862 4689 scope.go:117] "RemoveContainer" containerID="20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10" Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.201301 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.202846 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" event={"ID":"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2","Type":"ContainerStarted","Data":"a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.202939 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" event={"ID":"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2","Type":"ContainerStarted","Data":"921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.202975 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" event={"ID":"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2","Type":"ContainerStarted","Data":"187e9d643e50ab114be8c64cf398aafa42fff3c0e7b37c17cc0a143382ae28ce"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.217023 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.232724 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.254505 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.267746 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.280830 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.282702 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.282774 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.282789 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.282811 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.282827 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.304704 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.322303 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.341328 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.366106 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.384729 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8
d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.385128 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.385191 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.385204 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.385226 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.385244 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.408201 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.439680 4689 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.459738 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.459866 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.459937 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:12:15.459899771 +0000 UTC m=+52.378144896 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.460021 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.460101 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:15.460081835 +0000 UTC m=+52.378326940 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.460128 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.460190 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.460222 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:15.460213218 +0000 UTC m=+52.378458313 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.462405 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.489278 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.489330 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.489344 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.489366 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.489381 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.496644 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/et
cd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.516542 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.535515 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.559933 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.560568 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.560657 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.560842 4689 projected.go:288] Couldn't 
get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.560871 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.560886 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.560949 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:15.560930333 +0000 UTC m=+52.479175428 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.561023 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.561122 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.561150 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.561262 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:15.56122978 +0000 UTC m=+52.479475055 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.581012 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.592694 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.592753 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.592769 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.592794 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.592817 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.602084 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.616233 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.636659 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.649456 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.675474 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.693493 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.695891 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.695965 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.695984 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.696018 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.696039 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.712915 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.736426 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping 
watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.750753 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 
21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.766467 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.782887 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.799110 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.799156 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.799167 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.799185 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.799196 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.801371 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.817243 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.832343 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.866839 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.866916 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.867003 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.867460 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.903169 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.903230 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.903250 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.903282 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.903303 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:11:59Z","lastTransitionTime":"2025-10-13T21:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.916541 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-nffnw"] Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.917449 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:11:59 crc kubenswrapper[4689]: E1013 21:11:59.917572 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.935654 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.953007 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.976060 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e
698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 21:11:59 crc kubenswrapper[4689]: I1013 21:11:59.994394 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:11:59Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.007294 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.007408 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.007428 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.007450 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.007492 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.028028 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.055323 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.065618 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.065934 4689 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l477\" (UniqueName: \"kubernetes.io/projected/3f5bb2ee-abeb-4342-929a-d61e89f30351-kube-api-access-6l477\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.074403 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.096498 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.111392 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.111453 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.111469 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.111493 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.111507 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.118256 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.140498 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.161473 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.167117 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.167221 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l477\" (UniqueName: \"kubernetes.io/projected/3f5bb2ee-abeb-4342-929a-d61e89f30351-kube-api-access-6l477\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.167340 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.167466 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. 
No retries permitted until 2025-10-13 21:12:00.667433136 +0000 UTC m=+37.585678261 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.179453 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.187766 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l477\" (UniqueName: \"kubernetes.io/projected/3f5bb2ee-abeb-4342-929a-d61e89f30351-kube-api-access-6l477\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.199254 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.208081 4689 scope.go:117] "RemoveContainer" containerID="20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10" Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.208347 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.214753 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.214815 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.214831 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.214857 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.214873 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.219240 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.237490 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.250352 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.250527 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.250387 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.250664 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.250907 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.250950 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.268804 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.269339 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.274990 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.275043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.275065 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.275088 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.275106 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.288523 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.293613 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.293664 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.293677 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.293705 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.293719 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.313556 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.318609 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.318671 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.318688 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.318717 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.318733 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.332627 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.338240 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.338286 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.338300 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.338322 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.338339 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.358143 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:00Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.358385 4689 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.360488 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.360563 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.360617 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.360659 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.360742 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.464504 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.464575 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.464658 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.464708 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.464727 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.568843 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.568915 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.568945 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.568976 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.569181 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.673127 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.673205 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.673222 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.673251 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.673271 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.673392 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.673643 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.673734 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:01.673704585 +0000 UTC m=+38.591949700 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.781857 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.781954 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.781974 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.782264 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.782420 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.867307 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:00 crc kubenswrapper[4689]: E1013 21:12:00.867658 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.886367 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.886422 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.886433 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.886451 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.886468 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.989698 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.989762 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.989780 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.989807 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:00 crc kubenswrapper[4689]: I1013 21:12:00.989827 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:00Z","lastTransitionTime":"2025-10-13T21:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.092918 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.092985 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.093002 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.093026 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.093045 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.196652 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.196704 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.196713 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.196730 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.196740 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.299832 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.299904 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.299922 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.299981 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.300005 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.403437 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.403488 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.403498 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.403515 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.403524 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.507542 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.507634 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.507650 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.507674 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.507688 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.611233 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.611324 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.611342 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.611365 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.611384 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.688975 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:01 crc kubenswrapper[4689]: E1013 21:12:01.689244 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:01 crc kubenswrapper[4689]: E1013 21:12:01.689344 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:03.689315242 +0000 UTC m=+40.607560367 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.715194 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.715295 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.715334 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.715371 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.715400 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.818869 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.818935 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.818955 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.818980 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.819002 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.867444 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.867506 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:01 crc kubenswrapper[4689]: E1013 21:12:01.867646 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.867444 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:01 crc kubenswrapper[4689]: E1013 21:12:01.867775 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:01 crc kubenswrapper[4689]: E1013 21:12:01.867907 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.922418 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.922486 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.922506 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.922533 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:01 crc kubenswrapper[4689]: I1013 21:12:01.922561 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:01Z","lastTransitionTime":"2025-10-13T21:12:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.026101 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.026172 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.026190 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.026215 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.026234 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.129238 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.129311 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.129330 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.129358 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.129387 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.232352 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.232412 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.232426 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.232447 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.232462 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.335683 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.335785 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.335804 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.335832 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.335851 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.439402 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.439488 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.439506 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.439537 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.439557 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.543341 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.543419 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.543443 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.543474 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.543495 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.646863 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.646954 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.646977 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.647016 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.647044 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.751358 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.751446 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.751470 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.751502 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.751561 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.855764 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.855832 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.855852 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.855879 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.855898 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.867085 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:02 crc kubenswrapper[4689]: E1013 21:12:02.867280 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.959279 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.959328 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.959339 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.959357 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:02 crc kubenswrapper[4689]: I1013 21:12:02.959372 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:02Z","lastTransitionTime":"2025-10-13T21:12:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.063741 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.063815 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.063839 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.063869 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.063889 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.167646 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.167696 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.167707 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.167727 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.167748 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.273564 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.273686 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.273713 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.273747 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.273783 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.378276 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.378346 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.378371 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.378406 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.378429 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
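[editorial example] Every NetworkReady=false condition repeating through this stretch has the same root cause: no CNI configuration file under /etc/kubernetes/cni/net.d/. A small Go sketch of such a presence check, under the assumption that a usable config is any .conf/.conflist/.json file in that directory; in reality the kubelet learns network readiness from the CRI runtime, which delegates to libcni, and hasCNIConfig here is a hypothetical helper.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI config file.
// An unreadable or missing directory counts as "not ready", matching the
// kubelet condition in the surrounding log.
func hasCNIConfig(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni accepts (assumed here)
			return true
		}
	}
	return false
}

func main() {
	if !hasCNIConfig("/etc/kubernetes/cni/net.d") {
		fmt.Println("container runtime network not ready: NetworkReady=false")
	}
}
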
Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.467829 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.481302 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.481541 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.481705 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.481834 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.481986 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.492717 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.513225 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.539700 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b43
3307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.557791 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.582019 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.585248 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.585326 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.585347 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.585376 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.585394 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.606874 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.629375 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.650385 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.683437 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2
f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.688498 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.688547 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.688558 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.688576 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.688603 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.700162 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.725373 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.734287 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:03 crc kubenswrapper[4689]: E1013 21:12:03.734461 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:03 crc kubenswrapper[4689]: E1013 21:12:03.734567 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:07.73453904 +0000 UTC m=+44.652784165 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.746793 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.768371 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.792159 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.792197 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.792261 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.792278 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.792299 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.792311 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.809793 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.829806 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.846987 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.866787 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.866839 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:03 crc kubenswrapper[4689]: E1013 21:12:03.866931 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.866787 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:03 crc kubenswrapper[4689]: E1013 21:12:03.867048 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:03 crc kubenswrapper[4689]: E1013 21:12:03.867098 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.895326 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.895370 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.895380 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.895397 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.895416 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.899759 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.922138 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.944010 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.974288 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2
f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.990695 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:03Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.997391 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.997444 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.997456 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.997474 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:03 crc kubenswrapper[4689]: I1013 21:12:03.997486 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:03Z","lastTransitionTime":"2025-10-13T21:12:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.008806 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.024417 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.043518 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.061061 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.081809 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.097099 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.100218 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.100253 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.100266 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.100286 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.100300 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.115706 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.137604 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.153653 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.168081 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8
d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.185848 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.198180 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\
\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:04Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.203108 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.203180 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.203201 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.203228 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.203246 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.306093 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.306166 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.306184 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.306211 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.306229 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.409104 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.409158 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.409170 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.409188 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.409200 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.512770 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.512840 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.512854 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.512878 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.512892 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.616300 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.616429 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.616456 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.616492 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.616520 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.719672 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.719775 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.719800 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.719833 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.719860 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.824037 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.824110 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.824123 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.824148 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.824161 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.866858 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:04 crc kubenswrapper[4689]: E1013 21:12:04.867115 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.928014 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.928097 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.928111 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.928138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:04 crc kubenswrapper[4689]: I1013 21:12:04.928159 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:04Z","lastTransitionTime":"2025-10-13T21:12:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.031748 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.031887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.031920 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.031956 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.031981 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.135429 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.135500 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.135518 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.135545 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.135561 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.239545 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.239685 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.239713 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.239748 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.239783 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.342723 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.342806 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.342826 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.342856 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.342874 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.445418 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.445471 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.445482 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.445502 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.445513 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.548634 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.548680 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.548691 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.548709 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.548721 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.652298 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.652355 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.652369 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.652392 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.652406 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.755228 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.755274 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.755284 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.755300 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.755310 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.857694 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.857840 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.857860 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.857893 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.857915 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.867253 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.867247 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.867260 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:05 crc kubenswrapper[4689]: E1013 21:12:05.867500 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:05 crc kubenswrapper[4689]: E1013 21:12:05.867724 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:05 crc kubenswrapper[4689]: E1013 21:12:05.867989 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.962113 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.962209 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.962229 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.962261 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:05 crc kubenswrapper[4689]: I1013 21:12:05.962282 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:05Z","lastTransitionTime":"2025-10-13T21:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.065470 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.065533 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.065548 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.065574 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.065615 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.169887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.170034 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.170048 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.170073 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.170107 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.272680 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.272736 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.272748 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.272769 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.272782 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.376535 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.376638 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.376663 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.376742 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.376770 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.480389 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.480454 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.480478 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.480503 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.480521 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.583173 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.583234 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.583248 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.583270 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.583285 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.686494 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.686544 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.686553 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.686574 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.686865 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.789928 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.789979 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.789988 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.790005 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.790015 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.866784 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:06 crc kubenswrapper[4689]: E1013 21:12:06.866927 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.892880 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.892944 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.892957 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.892979 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.892999 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.996166 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.996225 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.996236 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.996253 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:06 crc kubenswrapper[4689]: I1013 21:12:06.996263 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:06Z","lastTransitionTime":"2025-10-13T21:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.099082 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.099151 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.099165 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.099184 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.099197 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.202724 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.202782 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.202794 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.202820 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.202835 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.306162 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.306264 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.306290 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.306328 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.306354 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.410443 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.410515 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.410536 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.410565 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.410624 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.513113 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.513199 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.513233 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.513305 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.513343 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.616809 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.616874 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.616891 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.616916 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.616935 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.725297 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.725475 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.725704 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.725748 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.725794 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.774836 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:07 crc kubenswrapper[4689]: E1013 21:12:07.775110 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:07 crc kubenswrapper[4689]: E1013 21:12:07.775246 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:15.77521274 +0000 UTC m=+52.693457865 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.831096 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.831161 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.831183 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.831211 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.831231 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.867413 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.867482 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.867623 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:07 crc kubenswrapper[4689]: E1013 21:12:07.867778 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:07 crc kubenswrapper[4689]: E1013 21:12:07.867956 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:07 crc kubenswrapper[4689]: E1013 21:12:07.868104 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.935158 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.935219 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.935242 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.935274 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:07 crc kubenswrapper[4689]: I1013 21:12:07.935297 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:07Z","lastTransitionTime":"2025-10-13T21:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.038428 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.038483 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.038695 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.038723 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.038745 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.141295 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.141364 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.141381 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.141407 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.141426 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.245672 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.245735 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.245756 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.245783 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.245806 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.349153 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.349227 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.349246 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.349276 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.349298 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.452056 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.452115 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.452127 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.452143 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.452155 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.555564 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.555702 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.555721 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.555744 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.555758 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.659766 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.659841 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.659870 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.659902 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.659928 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.763375 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.763533 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.763564 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.763642 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.763668 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.866446 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:08 crc kubenswrapper[4689]: E1013 21:12:08.866719 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.867028 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.867097 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.867123 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.867155 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.867178 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.970916 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.970989 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.971017 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.971053 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:08 crc kubenswrapper[4689]: I1013 21:12:08.971083 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:08Z","lastTransitionTime":"2025-10-13T21:12:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.075366 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.075441 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.075460 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.075491 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.075511 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.179544 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.179621 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.179637 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.179665 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.179682 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.283247 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.283325 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.283349 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.283381 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.283405 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.386939 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.387000 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.387017 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.387040 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.387058 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.490968 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.491038 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.491056 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.491081 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.491098 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.595112 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.595224 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.595245 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.595281 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.595301 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.699155 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.699252 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.699289 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.699325 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.699354 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.803414 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.803481 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.803499 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.803527 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.803548 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.866759 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.866902 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.866759 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:09 crc kubenswrapper[4689]: E1013 21:12:09.867106 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:09 crc kubenswrapper[4689]: E1013 21:12:09.867315 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:09 crc kubenswrapper[4689]: E1013 21:12:09.867570 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.909991 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.910073 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.910119 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.910154 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:09 crc kubenswrapper[4689]: I1013 21:12:09.910181 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:09Z","lastTransitionTime":"2025-10-13T21:12:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.013997 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.014069 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.014087 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.014117 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.014138 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.117530 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.117644 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.117669 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.117702 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.117728 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.221435 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.221505 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.221529 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.221564 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.221637 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.325248 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.325356 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.325376 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.325407 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.325427 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.428445 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.428508 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.428522 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.428542 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.428556 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.522551 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.522638 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.522650 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.522671 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.522687 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: E1013 21:12:10.539885 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:10Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.545055 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.545122 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.545136 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.545159 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.545175 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: E1013 21:12:10.562492 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:10Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.567499 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.567555 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
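Every failed status patch in this stretch is the same fault: the kubelet's PATCH to the node object is intercepted by the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743/node, and the TLS handshake is rejected because the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-10-13T21:12:10Z. A minimal Go sketch (not kubelet code; only the address is taken from the Post URL in the error) that retrieves the serving certificate and prints its validity window, which is enough to confirm the expiry from the node:

package main

import (
    "crypto/tls"
    "fmt"
    "log"
)

func main() {
    // Address taken from the failing webhook URL in the log above.
    conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
        // Verification is disabled on purpose: the goal is to read the
        // expired certificate, not to trust it.
        InsecureSkipVerify: true,
    })
    if err != nil {
        log.Fatalf("dial: %v", err)
    }
    defer conn.Close()

    for _, cert := range conn.ConnectionState().PeerCertificates {
        fmt.Printf("subject=%v notBefore=%v notAfter=%v\n",
            cert.Subject, cert.NotBefore, cert.NotAfter)
    }
}

Against the endpoint above this would be expected to print a notAfter of 2025-08-24T17:21:41Z, matching the x509 message in the patch failures.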
event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.567578 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.567627 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.567647 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: E1013 21:12:10.585541 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:10Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.590392 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.590468 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
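The webhook name in the error corresponds to a ValidatingWebhookConfiguration registered by the OVN-Kubernetes network-node-identity component, which is why node status patches (and nothing else here) keep bouncing. A hedged client-go sketch to list the registered validating webhooks and where each one points; the kubeconfig path is an assumption for illustration and any admin kubeconfig works:

package main

import (
    "context"
    "fmt"
    "log"

    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    "k8s.io/client-go/kubernetes"
    "k8s.io/client-go/tools/clientcmd"
)

func main() {
    // Illustrative kubeconfig path; substitute any admin kubeconfig.
    cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
    if err != nil {
        log.Fatal(err)
    }
    cs, err := kubernetes.NewForConfig(cfg)
    if err != nil {
        log.Fatal(err)
    }
    vwcs, err := cs.AdmissionregistrationV1().ValidatingWebhookConfigurations().
        List(context.Background(), metav1.ListOptions{})
    if err != nil {
        log.Fatal(err)
    }
    for _, vwc := range vwcs.Items {
        for _, wh := range vwc.Webhooks {
            target := "<cluster service>"
            if wh.ClientConfig.URL != nil {
                target = *wh.ClientConfig.URL // e.g. https://127.0.0.1:9743/node
            }
            fmt.Printf("%s -> %s\n", wh.Name, target)
        }
    }
}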
event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.590488 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.590517 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.590537 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: E1013 21:12:10.608295 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:10Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.613558 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.613661 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
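Stacked underneath the webhook failure is the NotReady condition itself: the runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ holds no CNI network configuration. A sketch of that readiness test, assuming the conf-file extensions that libcni scans for (*.conf, *.conflist, *.json); this is illustrative, not the actual CRI-O/ocicni code:

package main

import (
    "fmt"
    "path/filepath"
)

// cniConfigured reports whether dir holds any CNI network config file. The
// extensions mirror what libcni scans; the helper is illustrative only.
func cniConfigured(dir string) bool {
    for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
        matches, err := filepath.Glob(filepath.Join(dir, pattern))
        if err == nil && len(matches) > 0 {
            return true
        }
    }
    return false
}

func main() {
    dir := "/etc/kubernetes/cni/net.d" // directory named in the log message
    fmt.Printf("CNI config present in %s: %v\n", dir, cniConfigured(dir))
}

On this node the check would come back false until the network operator writes a config into that directory, which is exactly what the repeated "Has your network provider started?" message is asking about.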
event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.613681 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.613708 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.613731 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: E1013 21:12:10.634290 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:10Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:10 crc kubenswrapper[4689]: E1013 21:12:10.634434 4689 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.636195 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
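The "update node status exceeds retry count" record above is the kubelet giving up for this sync period: each update cycle makes a fixed number of patch attempts (nodeStatusUpdateRetry, 5 in the upstream kubelet) before surfacing this error and waiting for the next period. A sketch of that pattern, treating the exact constant as an assumption for this particular build:

package main

import (
    "errors"
    "fmt"
)

// nodeStatusUpdateRetry mirrors the upstream kubelet constant; the value 5
// matches the retry cadence visible in the log but is assumed here.
const nodeStatusUpdateRetry = 5

// updateNodeStatus retries a status update a fixed number of times, matching
// the "will retry" / "exceeds retry count" sequence recorded above.
func updateNodeStatus(tryUpdate func() error) error {
    for i := 0; i < nodeStatusUpdateRetry; i++ {
        if err := tryUpdate(); err == nil {
            return nil
        }
    }
    return errors.New("update node status exceeds retry count")
}

func main() {
    err := updateNodeStatus(func() error {
        // Stand-in for the PATCH the expired-certificate webhook rejects.
        return errors.New("failed calling webhook")
    })
    fmt.Println(err)
}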
event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.636243 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.636262 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.636284 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.636301 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.740042 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.740115 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.740138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.740171 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.740198 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.843740 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.843845 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.843901 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.843928 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.843946 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.867528 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:10 crc kubenswrapper[4689]: E1013 21:12:10.867828 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.947742 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.947821 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.947838 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.947871 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:10 crc kubenswrapper[4689]: I1013 21:12:10.947894 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:10Z","lastTransitionTime":"2025-10-13T21:12:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.052024 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.052079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.052093 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.052116 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.052129 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.155391 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.155452 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.155467 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.155500 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.155557 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.259176 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.259234 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.259249 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.259270 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.259285 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.362704 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.362814 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.362846 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.362885 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.362913 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.466622 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.466672 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.466716 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.466737 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.466751 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.570697 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.570800 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.570824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.570858 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.570880 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.673983 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.674043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.674059 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.674082 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.674104 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.777873 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.777945 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.777965 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.777996 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.778014 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.866998 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.867129 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:11 crc kubenswrapper[4689]: E1013 21:12:11.867245 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.867269 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:11 crc kubenswrapper[4689]: E1013 21:12:11.867666 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:11 crc kubenswrapper[4689]: E1013 21:12:11.867796 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.882559 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.882633 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.882647 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.882665 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.882678 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.986259 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.986334 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.986354 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.986381 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:11 crc kubenswrapper[4689]: I1013 21:12:11.986405 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:11Z","lastTransitionTime":"2025-10-13T21:12:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.089900 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.089968 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.089985 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.090012 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.090031 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.193088 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.193169 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.193195 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.193233 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.193258 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.296414 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.296497 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.296521 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.296552 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.296579 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.399778 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.399843 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.399858 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.399885 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.399900 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.505122 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.505200 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.505218 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.505251 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.505271 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.608787 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.608868 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.608892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.608923 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.608955 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.674684 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.687003 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.711379 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.713036 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.713091 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.713114 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.713148 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.713176 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.733805 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.754476 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.788284 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e
698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.808034 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.816710 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.816832 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.816860 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.816899 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.816924 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.826828 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.848218 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.866303 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.866898 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:12 crc kubenswrapper[4689]: E1013 21:12:12.867255 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.868218 4689 scope.go:117] "RemoveContainer" containerID="20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.884441 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.905865 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.920526 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.921096 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.921114 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.921137 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.921155 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:12Z","lastTransitionTime":"2025-10-13T21:12:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.921254 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.945369 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.962516 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.975994 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:12 crc kubenswrapper[4689]: I1013 21:12:12.988793 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8
d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:12Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.003939 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.016076 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\
\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.024649 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.024699 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.024714 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.024732 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.024744 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.127659 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.127724 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.127745 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.127771 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.127790 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.230432 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.230781 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.230803 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.230839 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.230863 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.280248 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/1.log" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.283293 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f"} Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.284301 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.301043 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.318849 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.334028 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.334085 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.334108 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.334135 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.334153 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.343863 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.363280 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.388544 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.414283 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.437117 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.437190 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.437199 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.437219 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.437229 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.446302 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-1
3T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e56
69b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.461950 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.477512 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.491476 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.515277 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9
b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.530092 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.539870 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.539935 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.539953 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.539973 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.539984 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.544206 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.561523 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.579910 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.597134 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.612604 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.624289 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.642113 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.642158 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.642167 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.642183 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.642193 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.744929 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.744973 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.744984 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.745005 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.745016 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.846749 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.846803 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.846817 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.846838 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.846850 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.867501 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.867516 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.867543 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:13 crc kubenswrapper[4689]: E1013 21:12:13.867834 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 21:12:13 crc kubenswrapper[4689]: E1013 21:12:13.867978 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 21:12:13 crc kubenswrapper[4689]: E1013 21:12:13.868162 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351"
Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.884368 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.906860 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.920051 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.936108 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.948840 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.948874 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.948884 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.948900 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.948910 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:13Z","lastTransitionTime":"2025-10-13T21:12:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.949045 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.963901 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.977470 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:13 crc kubenswrapper[4689]: I1013 21:12:13.996174 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9
b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 
21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:13Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.007931 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.027784 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.039565 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.051706 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.051774 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.051793 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.051824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.051845 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.059132 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.072090 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.085311 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.096867 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.107443 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.124792 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.137933 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.153894 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.153942 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.153956 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.153973 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.153984 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.256427 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.256464 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.256473 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.256489 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.256498 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.286407 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/2.log" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.287154 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/1.log" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.289264 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f" exitCode=1 Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.289328 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.289378 4689 scope.go:117] "RemoveContainer" containerID="20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.289935 4689 scope.go:117] "RemoveContainer" containerID="013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f" Oct 13 21:12:14 crc kubenswrapper[4689]: E1013 21:12:14.290101 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.309872 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.335682 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.359384 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.359463 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.359483 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.359514 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.359535 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.364961 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9
b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20ad0bcb25c70056a7cc63744ea43c7b143f50f2f25e9273589193d36aa22d10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"message\\\":\\\"d (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1013 21:11:57.297541 6103 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:11:57.297596 6103 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:11:57.297623 6103 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:11:57.297634 6103 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:11:57.297661 6103 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:11:57.297672 6103 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:11:57.297690 6103 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:11:57.297692 6103 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:11:57.297700 6103 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:11:57.297712 6103 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:11:57.297732 6103 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:11:57.297811 6103 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:11:57.297856 6103 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:11:57.297898 6103 factory.go:656] Stopping watch factory\\\\nI1013 21:11:57.297916 6103 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:11:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 
ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.380205 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.408162 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.422070 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.436124 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.454977 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.468199 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.468284 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.468304 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.468335 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.468353 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.473427 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.486501 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.496870 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.508715 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.521516 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.535122 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.555626 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.567454 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.570743 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.570781 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.570795 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.570815 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.570830 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.588067 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.604170 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:14Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.673422 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.673524 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.673545 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.673575 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.673639 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.777183 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.777246 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.777262 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.777284 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.777299 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.867426 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:14 crc kubenswrapper[4689]: E1013 21:12:14.867652 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.881352 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.881410 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.881432 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.881454 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.881474 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.985147 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.985211 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.985230 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.985260 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:14 crc kubenswrapper[4689]: I1013 21:12:14.985282 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:14Z","lastTransitionTime":"2025-10-13T21:12:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.089493 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.089557 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.089578 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.089671 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.089693 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.194388 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.194858 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.195060 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.195218 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.195352 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.296393 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/2.log" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.298170 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.298363 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.298626 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.298883 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.299150 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.304058 4689 scope.go:117] "RemoveContainer" containerID="013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f" Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.304339 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.335515 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9
b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.354402 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.387735 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.403072 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.403112 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.403131 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.403162 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.403186 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.406859 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.426529 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.443172 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.460271 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.464864 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.465097 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:12:47.465048842 +0000 UTC m=+84.383293967 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.465250 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.465351 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.465522 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.465636 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:47.465613274 +0000 UTC m=+84.383858399 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.465716 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.465810 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:47.465790628 +0000 UTC m=+84.384035743 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.475337 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.494169 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.506828 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.506931 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.506953 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.506983 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.507007 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.515898 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.534785 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.553071 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.566364 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.566433 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566606 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566625 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566639 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566711 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:47.566685197 +0000 UTC m=+84.484930282 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566740 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566788 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566811 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.566906 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:47.566877913 +0000 UTC m=+84.485123028 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.570663 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.582908 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.599240 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.609465 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.609505 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.609517 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.609540 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.609558 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.613378 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.631881 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.647288 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:15Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.713268 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.713369 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.713386 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.713436 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.713451 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.817763 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.817852 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.817871 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.817910 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.817951 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.866866 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.866982 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.866880 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.867204 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.867350 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.867737 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.870974 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.872025 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: E1013 21:12:15.872841 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. No retries permitted until 2025-10-13 21:12:31.872801715 +0000 UTC m=+68.791046840 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.922450 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.922903 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.923158 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.923397 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:15 crc kubenswrapper[4689]: I1013 21:12:15.923657 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:15Z","lastTransitionTime":"2025-10-13T21:12:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.026398 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.026894 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.027042 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.027190 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.027390 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.131126 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.131217 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.131239 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.131269 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.131290 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.234700 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.234784 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.234803 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.234835 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.234858 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.338552 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.339171 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.339301 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.339508 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.339698 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.442769 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.442827 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.442843 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.442865 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.442877 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.545944 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.546022 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.546042 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.546070 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.546090 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.649045 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.649133 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.649154 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.649184 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.649203 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.752050 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.752126 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.752150 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.752209 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.752228 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.854709 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.854780 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.854798 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.854827 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.854847 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.867067 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:16 crc kubenswrapper[4689]: E1013 21:12:16.867271 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.958203 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.958305 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.958332 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.958363 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:16 crc kubenswrapper[4689]: I1013 21:12:16.958387 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:16Z","lastTransitionTime":"2025-10-13T21:12:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.061815 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.061895 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.061921 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.061953 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.061974 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.165411 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.165482 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.165504 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.165535 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.165554 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.269612 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.269685 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.269707 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.269732 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.269749 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.373268 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.373316 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.373329 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.373347 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.373359 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.476978 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.477043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.477060 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.477081 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.477096 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.581435 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.581531 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.581553 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.581637 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.581662 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.685356 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.685412 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.685426 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.685445 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.685463 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.789462 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.789520 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.789539 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.789565 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.789616 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.867316 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.867469 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:17 crc kubenswrapper[4689]: E1013 21:12:17.867549 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.867673 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:17 crc kubenswrapper[4689]: E1013 21:12:17.867868 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:17 crc kubenswrapper[4689]: E1013 21:12:17.867996 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.892171 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.892243 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.892263 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.892352 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.892375 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.996566 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.996656 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.996680 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.996705 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:17 crc kubenswrapper[4689]: I1013 21:12:17.996722 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:17Z","lastTransitionTime":"2025-10-13T21:12:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.099824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.099883 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.099901 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.099925 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.099942 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.203879 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.203942 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.203956 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.203979 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.203994 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.307062 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.307130 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.307144 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.307170 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.307186 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.411043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.411139 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.411172 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.411209 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.411236 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.515209 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.515270 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.515282 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.515306 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.515319 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.619746 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.619823 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.619841 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.619866 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.619885 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.724272 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.724352 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.724371 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.724397 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.724417 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.827429 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.827488 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.827499 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.827519 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.827530 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.866880 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:18 crc kubenswrapper[4689]: E1013 21:12:18.867096 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.931174 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.931240 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.931260 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.931289 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:18 crc kubenswrapper[4689]: I1013 21:12:18.931316 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:18Z","lastTransitionTime":"2025-10-13T21:12:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.035488 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.035574 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.035641 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.035677 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.035703 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.139401 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.139483 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.139502 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.139524 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.139540 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.242620 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.242676 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.242685 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.242703 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.242715 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.346093 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.346138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.346151 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.346171 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.346185 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.450088 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.450186 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.450215 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.450255 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.450283 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.554503 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.554572 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.554618 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.554640 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.554650 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.659545 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.659685 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.659706 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.659733 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.659755 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.763682 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.763753 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.763774 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.763805 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.763831 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.866727 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:19 crc kubenswrapper[4689]: E1013 21:12:19.866912 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.866981 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:19 crc kubenswrapper[4689]: E1013 21:12:19.867152 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.867188 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.867254 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.867275 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.867304 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.867327 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.867331 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:19 crc kubenswrapper[4689]: E1013 21:12:19.867625 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.970326 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.970384 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.970401 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.970428 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:19 crc kubenswrapper[4689]: I1013 21:12:19.970447 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:19Z","lastTransitionTime":"2025-10-13T21:12:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.074189 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.074255 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.074273 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.074301 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.074321 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.177672 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.177717 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.177733 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.177762 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.177780 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.282672 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.282749 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.282773 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.282805 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.282823 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.386994 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.387143 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.387170 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.387206 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.387241 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.490222 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.490280 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.490297 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.490322 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.490340 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.594111 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.594202 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.594223 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.594254 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.594279 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.699992 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.700079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.700103 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.700136 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.700170 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.702089 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.702148 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.702179 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.702219 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.702242 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: E1013 21:12:20.729539 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:20Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.737055 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.737133 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.737160 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.737193 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.737218 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: E1013 21:12:20.758407 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:20Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.764757 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.764824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.764845 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.764874 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.764894 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: E1013 21:12:20.788416 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:20Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.793847 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.793913 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.793929 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.793949 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.793962 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: E1013 21:12:20.807725 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:20Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.812786 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.813009 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.813084 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.813193 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.813304 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: E1013 21:12:20.828384 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:20Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:20 crc kubenswrapper[4689]: E1013 21:12:20.828684 4689 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.831856 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.831971 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.832000 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.832079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.832243 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.867366 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:20 crc kubenswrapper[4689]: E1013 21:12:20.867635 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.936911 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.936986 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.937010 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.937043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:20 crc kubenswrapper[4689]: I1013 21:12:20.937064 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:20Z","lastTransitionTime":"2025-10-13T21:12:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.041031 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.041079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.041091 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.041109 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.041121 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.144767 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.144826 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.144844 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.144869 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.144892 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.248493 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.248573 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.248620 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.248651 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.248677 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.352947 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.353026 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.353050 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.353088 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.353114 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.457123 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.457229 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.457257 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.457292 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.457374 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.561579 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.561686 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.561708 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.561733 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.561752 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.665142 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.665472 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.665576 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.665739 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.665836 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.769320 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.769382 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.769398 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.769423 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.769439 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.867286 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.867286 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:21 crc kubenswrapper[4689]: E1013 21:12:21.867443 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:21 crc kubenswrapper[4689]: E1013 21:12:21.867646 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.867736 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:21 crc kubenswrapper[4689]: E1013 21:12:21.868030 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.872495 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.872578 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.872644 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.872673 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.872693 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.976825 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.976888 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.976907 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.976937 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:21 crc kubenswrapper[4689]: I1013 21:12:21.976970 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:21Z","lastTransitionTime":"2025-10-13T21:12:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.079761 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.079824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.079843 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.079869 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.079890 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.182948 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.183540 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.183812 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.184036 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.184260 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.287409 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.287469 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.287481 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.287506 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.287518 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.391505 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.391580 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.391637 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.391670 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.391695 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.495157 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.495227 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.495263 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.495296 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.495321 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.598489 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.598620 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.598649 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.598727 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.598757 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.703390 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.703449 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.703475 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.703509 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.703533 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.807489 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.807561 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.807579 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.807643 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.807671 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.867284 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:22 crc kubenswrapper[4689]: E1013 21:12:22.867628 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.910668 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.910742 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.910766 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.910796 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:22 crc kubenswrapper[4689]: I1013 21:12:22.910814 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:22Z","lastTransitionTime":"2025-10-13T21:12:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.014467 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.014519 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.014535 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.014560 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.014582 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.118330 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.118399 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.118418 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.118446 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.118465 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.222632 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.222708 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.222727 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.222755 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.222774 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.326440 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.326518 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.326541 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.326575 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.326647 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.430174 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.430262 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.430291 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.430327 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.430354 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.534543 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.534627 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.534649 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.534679 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.534701 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.638691 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.638753 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.638771 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.638800 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.638825 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.743277 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.743881 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.744072 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.744243 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.744557 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.848743 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.848865 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.848890 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.848923 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.848945 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.866663 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:23 crc kubenswrapper[4689]: E1013 21:12:23.867279 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.868332 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:23 crc kubenswrapper[4689]: E1013 21:12:23.868616 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.869095 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:23 crc kubenswrapper[4689]: E1013 21:12:23.869322 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.891218 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:23Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.914288 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:23Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.933259 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:23Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.952340 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.952400 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.952414 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.952434 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.952449 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:23Z","lastTransitionTime":"2025-10-13T21:12:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.954909 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:23Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.973939 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:23Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:23 crc kubenswrapper[4689]: I1013 21:12:23.993178 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:23Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.012507 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.032459 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.053829 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.056810 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.056963 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.056997 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.057032 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.057054 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.073416 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.095985 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.112939 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.150280 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.161218 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.161283 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.161342 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.161372 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.161393 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.174136 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.195763 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.217032 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.242476 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9
b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.258765 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:24Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.263872 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.264105 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.264228 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.264365 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.264492 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.367763 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.367829 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.367848 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.367874 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.368002 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.471221 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.471277 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.471288 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.471309 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.471324 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.575972 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.576010 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.576021 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.576039 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.576049 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.681000 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.681098 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.681124 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.681153 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.681174 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.786803 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.786885 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.786906 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.786933 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.786952 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.866494 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:24 crc kubenswrapper[4689]: E1013 21:12:24.866724 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.890321 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.890844 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.891022 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.891185 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.891426 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.996669 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.996716 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.996738 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.996763 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:24 crc kubenswrapper[4689]: I1013 21:12:24.996781 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:24Z","lastTransitionTime":"2025-10-13T21:12:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.100011 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.100070 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.100137 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.100164 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.100182 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.204154 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.204244 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.204265 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.204301 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.204323 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.307649 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.307715 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.307734 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.307761 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.307779 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.410132 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.410204 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.410230 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.410262 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.410286 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.514275 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.514343 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.514361 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.514388 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.514406 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.618383 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.618455 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.618474 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.618500 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.618519 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.725300 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.725384 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.725408 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.725445 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.725471 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.829349 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.829449 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.829471 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.829495 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.829515 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.867888 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.867894 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.867905 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:25 crc kubenswrapper[4689]: E1013 21:12:25.868125 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.869536 4689 scope.go:117] "RemoveContainer" containerID="013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f" Oct 13 21:12:25 crc kubenswrapper[4689]: E1013 21:12:25.869524 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:25 crc kubenswrapper[4689]: E1013 21:12:25.869701 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:25 crc kubenswrapper[4689]: E1013 21:12:25.869979 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.934022 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.934577 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.934852 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.935033 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:25 crc kubenswrapper[4689]: I1013 21:12:25.935176 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:25Z","lastTransitionTime":"2025-10-13T21:12:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.039115 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.040193 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.040648 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.040863 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.041186 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.145283 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.145366 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.145388 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.145420 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.145440 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.249580 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.250203 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.250674 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.251026 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.251520 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.356276 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.357148 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.357422 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.357629 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.357789 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.461625 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.462102 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.462352 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.462797 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.463175 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.566688 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.567259 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.567407 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.567572 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.567786 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.672191 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.672768 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.672964 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.673109 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.673248 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.776883 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.777360 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.777487 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.777618 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.777745 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.866805 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:26 crc kubenswrapper[4689]: E1013 21:12:26.866999 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.881683 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.881985 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.882203 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.882380 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.882523 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.987480 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.987551 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.987566 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.987615 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:26 crc kubenswrapper[4689]: I1013 21:12:26.987632 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:26Z","lastTransitionTime":"2025-10-13T21:12:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.091469 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.091531 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.091544 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.091568 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.091612 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.194903 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.195024 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.195041 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.195062 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.195076 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.299097 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.299164 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.299180 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.299214 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.299233 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.402659 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.403261 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.403467 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.403673 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.403893 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.508175 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.508242 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.508260 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.508289 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.508315 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.612117 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.612171 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.612189 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.612220 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.612240 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.716139 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.716206 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.716225 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.716259 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.716296 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.820271 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.820340 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.820358 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.820385 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.820404 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.867670 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.867758 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.867790 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:27 crc kubenswrapper[4689]: E1013 21:12:27.867918 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:27 crc kubenswrapper[4689]: E1013 21:12:27.868027 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:27 crc kubenswrapper[4689]: E1013 21:12:27.868232 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.923963 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.924025 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.924037 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.924057 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:27 crc kubenswrapper[4689]: I1013 21:12:27.924069 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:27Z","lastTransitionTime":"2025-10-13T21:12:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.027249 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.027296 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.027307 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.027326 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.027336 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.130896 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.130971 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.130992 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.131017 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.131032 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.233913 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.233955 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.233971 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.233998 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.234020 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.336868 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.336919 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.336931 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.336958 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.336973 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.440373 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.440430 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.440441 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.440463 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.440475 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.543138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.543201 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.543231 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.543250 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.543263 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.646155 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.646217 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.646236 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.646259 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.646272 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.749248 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.749297 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.749307 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.749325 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.749338 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.852362 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.852439 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.852453 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.852475 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.852496 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.866791 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:28 crc kubenswrapper[4689]: E1013 21:12:28.866933 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.956416 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.956476 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.956491 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.956516 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:28 crc kubenswrapper[4689]: I1013 21:12:28.956535 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:28Z","lastTransitionTime":"2025-10-13T21:12:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.060849 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.060911 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.060924 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.060947 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.060961 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.164492 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.165336 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.165423 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.165505 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.165574 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.269435 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.269510 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.269531 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.269560 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.269615 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.371721 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.371778 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.371796 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.371818 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.371832 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.474454 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.474509 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.474522 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.474546 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.474564 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.578428 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.578497 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.578515 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.578543 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.578563 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.681759 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.681819 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.681837 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.681868 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.681890 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.784575 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.784704 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.784721 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.784745 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.784759 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.867568 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.867662 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:29 crc kubenswrapper[4689]: E1013 21:12:29.867915 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.867980 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:29 crc kubenswrapper[4689]: E1013 21:12:29.868192 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:29 crc kubenswrapper[4689]: E1013 21:12:29.868342 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.888059 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.888389 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.888569 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.888812 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.888970 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.992466 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.992956 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.993199 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.993393 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:29 crc kubenswrapper[4689]: I1013 21:12:29.993552 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:29Z","lastTransitionTime":"2025-10-13T21:12:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.096907 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.097437 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.097574 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.097758 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.097895 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.200965 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.202020 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.202214 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.202389 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.202618 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.306742 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.306818 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.306836 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.306868 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.306888 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.410400 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.410446 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.410487 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.410512 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.410526 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.514233 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.514849 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.514976 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.515128 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.515271 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.619130 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.619558 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.619795 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.620005 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.620205 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.723991 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.724049 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.724068 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.724094 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.724113 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.827193 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.827298 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.827319 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.827346 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.827365 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.866787 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:30 crc kubenswrapper[4689]: E1013 21:12:30.866914 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.876774 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.876817 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.876833 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.876850 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.876868 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.881175 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 13 21:12:30 crc kubenswrapper[4689]: E1013 21:12:30.893134 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:30Z is after 2025-08-24T17:21:41Z"
Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.903543 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.903663 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.903687 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.903716 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.903736 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: E1013 21:12:30.918949 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:30Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.924951 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.925030 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.925059 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.925085 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.925104 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: E1013 21:12:30.938309 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:30Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.942350 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.942401 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.942411 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.942432 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.942448 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: E1013 21:12:30.959704 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:30Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.963804 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.963861 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.963879 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.963902 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.963917 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:30 crc kubenswrapper[4689]: E1013 21:12:30.978997 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:30Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:30 crc kubenswrapper[4689]: E1013 21:12:30.979149 4689 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.980968 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.981008 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.981020 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.981036 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:30 crc kubenswrapper[4689]: I1013 21:12:30.981048 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:30Z","lastTransitionTime":"2025-10-13T21:12:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.084113 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.084161 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.084173 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.084197 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.084211 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.186896 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.186967 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.186984 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.187031 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.187048 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.290034 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.290102 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.290122 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.290149 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.290167 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.393447 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.393501 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.393518 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.393544 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.393570 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.496659 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.496738 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.496756 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.496784 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.496802 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.601568 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.601726 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.601788 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.601818 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.601840 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.704671 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.704732 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.704750 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.704776 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.704794 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.808400 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.808492 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.808522 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.808559 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.808623 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.866966 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.867063 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.867140 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:31 crc kubenswrapper[4689]: E1013 21:12:31.867331 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:31 crc kubenswrapper[4689]: E1013 21:12:31.867503 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:31 crc kubenswrapper[4689]: E1013 21:12:31.867671 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.881335 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:31 crc kubenswrapper[4689]: E1013 21:12:31.881629 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:31 crc kubenswrapper[4689]: E1013 21:12:31.881732 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. No retries permitted until 2025-10-13 21:13:03.88170284 +0000 UTC m=+100.799947965 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.911785 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.911856 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.911880 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.911919 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:31 crc kubenswrapper[4689]: I1013 21:12:31.911945 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:31Z","lastTransitionTime":"2025-10-13T21:12:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.015137 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.015210 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.015232 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.015260 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.015279 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.118919 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.119010 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.119039 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.119074 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.119098 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.222897 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.222996 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.223018 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.223050 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.223068 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.326909 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.326970 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.326987 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.327015 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.327036 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.431540 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.431734 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.431793 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.431880 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.431897 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.534738 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.534792 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.534803 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.534822 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.534836 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.638204 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.638260 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.638271 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.638289 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.638299 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.741815 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.742283 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.742400 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.742515 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.742615 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.845554 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.845984 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.846060 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.846163 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.846242 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.866956 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:32 crc kubenswrapper[4689]: E1013 21:12:32.867122 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.950019 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.950089 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.950107 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.950133 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:32 crc kubenswrapper[4689]: I1013 21:12:32.950151 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:32Z","lastTransitionTime":"2025-10-13T21:12:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.053819 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.053887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.053903 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.053923 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.053957 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.157334 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.157373 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.157385 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.157441 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.157692 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.261277 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.261308 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.261333 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.261348 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.261360 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.364268 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.364361 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.364388 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.364427 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.364453 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.378323 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/0.log" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.378418 4689 generic.go:334] "Generic (PLEG): container finished" podID="632b68ca-d2a4-4570-a0a2-8ea8d204fb59" containerID="d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44" exitCode=1 Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.378490 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerDied","Data":"d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.379349 4689 scope.go:117] "RemoveContainer" containerID="d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.406389 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\"
:true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPa
th\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.420322 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.455041 4689 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139
719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.467497 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.467542 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.467555 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.467575 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.467627 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.501937 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.524158 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.543860 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.568234 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.570171 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.570212 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.570223 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.570237 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.570249 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.585093 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.606023 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.618431 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.632386 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.647631 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.659387 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.672280 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.672962 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.673030 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.673047 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.673073 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.673091 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.687709 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.699528 4689 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.714410 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.731959 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.745403 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.776974 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.777029 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.777043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.777065 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.777078 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.867284 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:33 crc kubenswrapper[4689]: E1013 21:12:33.867426 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.867658 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:33 crc kubenswrapper[4689]: E1013 21:12:33.867739 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.867906 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:33 crc kubenswrapper[4689]: E1013 21:12:33.868097 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.889939 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.906632 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.915816 4689 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.915870 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.915887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.915908 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.915929 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:33Z","lastTransitionTime":"2025-10-13T21:12:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.926917 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.938965 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.963889 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.981629 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:33 crc kubenswrapper[4689]: I1013 21:12:33.996928 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:33Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.016874 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.018915 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.019011 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.019069 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.019135 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.019198 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.040506 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.055244 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.085729 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.100721 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.111946 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.122753 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.122820 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.122843 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.122870 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.122891 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.127992 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.142020 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.155490 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.170976 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.184885 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.205003 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.225419 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.225501 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.225518 4689 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.225547 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.225567 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.329103 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.329174 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.329190 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.329211 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.329224 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.384606 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/0.log" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.384691 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerStarted","Data":"99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.406560 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.427916 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.432219 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.432259 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.432271 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.432293 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.432314 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.446179 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.472492 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.487671 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.515281 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.531843 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.535163 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.535405 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.535496 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 
21:12:34.535633 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.535724 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.547841 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.563614 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.585178 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.598669 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.611008 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.627966 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.641043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.641112 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.641130 4689 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.641159 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.641178 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.646248 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.662084 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.674290 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.690703 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.703419 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.718894 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:34Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.744020 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.744162 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.744280 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.744364 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.744445 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.847457 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.847508 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.847520 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.847544 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.847557 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.866887 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:34 crc kubenswrapper[4689]: E1013 21:12:34.867025 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.949996 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.950045 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.950059 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.950079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:34 crc kubenswrapper[4689]: I1013 21:12:34.950092 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:34Z","lastTransitionTime":"2025-10-13T21:12:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.053459 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.053517 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.053536 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.053568 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.053616 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.156116 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.156160 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.156170 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.156186 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.156202 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.259110 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.259185 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.259202 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.259224 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.259238 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.362652 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.362706 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.362718 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.362739 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.362756 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.465254 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.465337 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.465365 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.465400 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.465425 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.567846 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.567919 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.567932 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.567950 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.567963 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.670427 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.670504 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.670526 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.670554 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.670574 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.773389 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.773443 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.773453 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.773472 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.773482 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.867492 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.867555 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.867504 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:35 crc kubenswrapper[4689]: E1013 21:12:35.867668 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:35 crc kubenswrapper[4689]: E1013 21:12:35.867917 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:35 crc kubenswrapper[4689]: E1013 21:12:35.867994 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.876976 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.877095 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.877157 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.877409 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.877445 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.981073 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.981118 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.981129 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.981144 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:35 crc kubenswrapper[4689]: I1013 21:12:35.981155 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:35Z","lastTransitionTime":"2025-10-13T21:12:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[21:12:36.084-21:12:36.600: node-status cycle repeated 6 more times; collapsed.]
[21:12:36.704-21:12:36.808: node-status cycle repeated 2 more times; collapsed.]
Oct 13 21:12:36 crc kubenswrapper[4689]: I1013 21:12:36.867283 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:36 crc kubenswrapper[4689]: E1013 21:12:36.867518 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[21:12:36.911-21:12:37.015: node-status cycle repeated 2 more times; collapsed.]
[21:12:37.118-21:12:37.636: node-status cycle repeated 6 more times; collapsed.]
[21:12:37.739-21:12:37.841: node-status cycle repeated 2 more times; collapsed.]
Oct 13 21:12:37 crc kubenswrapper[4689]: I1013 21:12:37.867679 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:37 crc kubenswrapper[4689]: E1013 21:12:37.867844 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 21:12:37 crc kubenswrapper[4689]: I1013 21:12:37.867679 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:37 crc kubenswrapper[4689]: E1013 21:12:37.867917 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 21:12:37 crc kubenswrapper[4689]: I1013 21:12:37.868240 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:12:37 crc kubenswrapper[4689]: E1013 21:12:37.868904 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351"
[21:12:37.945: node-status cycle repeated once more; collapsed.]
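The pod_workers.go:1301 entries show the sync gate at work: a pod that needs a pod network is skipped with an error and simply retried on the next resync, which is why the same three pods reappear at 21:12:35.867, 21:12:37.867, and 21:12:39.867, a 2 s cadence. A small illustrative sketch of that gate-and-retry pattern, with hypothetical names (syncPod, errNetworkNotReady) rather than the real pod-worker code:

package main

import (
	"errors"
	"fmt"
	"time"
)

var errNetworkNotReady = errors.New("network is not ready: NetworkReady=false reason:NetworkPluginNotReady")

// syncPod stands in for one pod worker's sync attempt (hypothetical name,
// not the real pod_workers.go API).
func syncPod(pod string, networkReady bool) error {
	if !networkReady {
		return fmt.Errorf("error syncing pod %q, skipping: %w", pod, errNetworkNotReady)
	}
	fmt.Printf("synced %s\n", pod)
	return nil
}

func main() {
	pods := []string{
		"openshift-multus/network-metrics-daemon-nffnw",
		"openshift-network-diagnostics/network-check-target-xd92c",
		"openshift-network-diagnostics/network-check-source-55646444c4-trplf",
	}
	networkReady := false
	for attempt := 1; attempt <= 3; attempt++ {
		for _, p := range pods {
			if err := syncPod(p, networkReady); err != nil {
				fmt.Println(err)
			}
		}
		time.Sleep(10 * time.Millisecond) // stands in for the ~2 s resync cadence in the log
		networkReady = attempt == 2       // pretend a CNI config shows up after the second pass
	}
}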
[21:12:38.050-21:12:38.574: node-status cycle repeated 6 more times; collapsed.]
[21:12:38.684-21:12:38.787: node-status cycle repeated 2 more times; collapsed.]
Oct 13 21:12:38 crc kubenswrapper[4689]: I1013 21:12:38.867325 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:38 crc kubenswrapper[4689]: E1013 21:12:38.867564 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[21:12:38.890-21:12:38.994: node-status cycle repeated 2 more times; collapsed.]
[21:12:39.098-21:12:39.621: node-status cycle repeated 6 more times; collapsed.]
[21:12:39.725-21:12:39.829: node-status cycle repeated 2 more times; collapsed.]
Oct 13 21:12:39 crc kubenswrapper[4689]: I1013 21:12:39.867282 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:12:39 crc kubenswrapper[4689]: I1013 21:12:39.867342 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:39 crc kubenswrapper[4689]: I1013 21:12:39.867467 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:39 crc kubenswrapper[4689]: E1013 21:12:39.867501 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351"
Oct 13 21:12:39 crc kubenswrapper[4689]: E1013 21:12:39.867802 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 21:12:39 crc kubenswrapper[4689]: E1013 21:12:39.867905 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[21:12:39.934-21:12:40.037: node-status cycle repeated 2 more times; collapsed.]
[21:12:40.141-21:12:40.663: node-status cycle repeated 6 more times; collapsed.]
Has your network provider started?"} Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.766504 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.766572 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.766619 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.766650 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.766669 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:40Z","lastTransitionTime":"2025-10-13T21:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.867528 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:40 crc kubenswrapper[4689]: E1013 21:12:40.867796 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.869105 4689 scope.go:117] "RemoveContainer" containerID="013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.870998 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.871090 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.871115 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.871834 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:40 crc kubenswrapper[4689]: I1013 21:12:40.871905 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:40Z","lastTransitionTime":"2025-10-13T21:12:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.008496 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.008561 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.008581 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.008639 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.008659 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.112646 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.112712 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.112731 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.112760 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.112779 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.221569 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.221634 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.221648 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.221673 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.221688 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.229968 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.230024 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.230037 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.230057 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.230071 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.248796 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.254597 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.254635 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.254646 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.254664 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.254679 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.276779 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.283116 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.283177 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.283191 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.283216 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.283231 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.301270 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.306820 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.306879 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.306895 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.306917 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.306931 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.327475 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.333181 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.333243 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.333263 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.333291 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.333312 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.357257 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.357520 4689 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.359872 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.359921 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.359938 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.359964 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.359977 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.414629 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/2.log" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.417487 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.418192 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.432301 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.444682 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.459116 4689 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.463492 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.463699 4689 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.463781 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.463861 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.463923 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.470171 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.487069 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.500727 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.514341 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.531838 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.554569 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.565990 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.566040 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.566050 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.566069 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.566083 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.570688 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.599025 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.614452 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.628551 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.640167 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.651442 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.661338 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.668347 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.668973 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.669041 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.669108 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.669191 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.675835 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.686691 4689 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.703717 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:41Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.772817 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.773214 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.773351 4689 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.773540 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.773739 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.867384 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.867433 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.867602 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.867721 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.867808 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:41 crc kubenswrapper[4689]: E1013 21:12:41.867979 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.876338 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.876523 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.876646 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.876798 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.876924 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.980123 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.980183 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.980197 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.980217 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:41 crc kubenswrapper[4689]: I1013 21:12:41.980231 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:41Z","lastTransitionTime":"2025-10-13T21:12:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.083517 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.083613 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.083628 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.083681 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.083704 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.187817 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.187898 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.187914 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.187958 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.187976 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.291654 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.291731 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.291749 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.291777 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.291797 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.395648 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.395732 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.395754 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.395787 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.395813 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.425394 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/3.log" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.426482 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/2.log" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.431518 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" exitCode=1 Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.431645 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.431984 4689 scope.go:117] "RemoveContainer" containerID="013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.433546 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:12:42 crc kubenswrapper[4689]: E1013 21:12:42.433914 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.462889 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.487834 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.499027 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.499079 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.499091 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.499112 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.499130 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.508095 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.536180 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.555490 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.581256 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.603087 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.603203 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.603224 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.603257 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.603280 4689 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.603395 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.626237 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.645123 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.677791 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://013fcc4a0434f5e8e486eca9c1c3f0fc55786eb9b70e4daac196fbb7ed1b930f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:13Z\\\",\\\"message\\\":\\\"cy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1013 21:12:13.867886 6327 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1013 21:12:13.868348 6327 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1013 21:12:13.868383 6327 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1013 21:12:13.868403 6327 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1013 21:12:13.868424 6327 handler.go:208] Removed *v1.Node event handler 2\\\\nI1013 21:12:13.868445 6327 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:13.868464 6327 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:13.868509 6327 factory.go:656] Stopping watch factory\\\\nI1013 21:12:13.868545 6327 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:13.868561 6327 handler.go:208] Removed *v1.Node event handler 7\\\\nI1013 21:12:13.868574 6327 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1013 21:12:13.868610 6327 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:13.869148 6327 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:13.869214 6327 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:13.869349 6327 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"ller event handlers\\\\nI1013 21:12:41.846715 6670 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:41.846770 6670 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:12:41.846783 6670 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:12:41.846814 6670 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:12:41.846867 6670 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:41.846852 6670 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:12:41.846887 6670 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:12:41.846905 6670 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.EgressFirewall event handler 
9\\\\nI1013 21:12:41.846945 6670 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:12:41.846952 6670 factory.go:656] Stopping watch factory\\\\nI1013 21:12:41.846984 6670 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:12:41.847005 6670 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:41.847031 6670 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:41.847108 6670 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/ku
bernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.698497 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.706820 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.706882 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.706900 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.706921 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.706935 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.733255 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.755828 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.776854 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.796365 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.810942 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.811009 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.811025 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.811049 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.811064 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.818449 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.835643 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.848788 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.864427 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:42Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.866701 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:42 crc kubenswrapper[4689]: E1013 21:12:42.866933 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.914452 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.914497 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.914507 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.914525 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:42 crc kubenswrapper[4689]: I1013 21:12:42.914535 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:42Z","lastTransitionTime":"2025-10-13T21:12:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.017809 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.017859 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.017872 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.017892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.017905 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.120984 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.121054 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.121073 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.121097 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.121115 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.226559 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.226644 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.226664 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.226687 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.226713 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.330545 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.330650 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.330674 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.330708 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.330729 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.434824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.434893 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.434908 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.434932 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.434952 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.439701 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/3.log"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.444607 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"
Oct 13 21:12:43 crc kubenswrapper[4689]: E1013 21:12:43.444808 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7"
Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.462660 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.479822 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.496003 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.516502 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.532127 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.538748 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.538822 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.538841 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.538861 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.538933 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.546069 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.566554 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.591004 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.615636 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.635794 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.642545 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.642636 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.642650 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.642672 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.642687 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.654094 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.676353 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.693333 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.712743 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.742018 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.745320 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.745359 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.745372 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.745387 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.745398 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.757424 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.772419 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.785808 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.803404 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"ller event handlers\\\\nI1013 21:12:41.846715 6670 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:41.846770 6670 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:12:41.846783 6670 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:12:41.846814 6670 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:12:41.846867 6670 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:41.846852 6670 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:12:41.846887 6670 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:12:41.846905 6670 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:41.846945 6670 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:12:41.846952 6670 factory.go:656] Stopping watch factory\\\\nI1013 21:12:41.846984 6670 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:12:41.847005 6670 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:41.847031 6670 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:41.847108 6670 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.848529 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.848612 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.848630 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.848652 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.848664 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.866899 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.866901 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.867004 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:43 crc kubenswrapper[4689]: E1013 21:12:43.867205 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:43 crc kubenswrapper[4689]: E1013 21:12:43.867326 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:43 crc kubenswrapper[4689]: E1013 21:12:43.867433 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.880171 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.890996 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.905929 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.924493 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.941900 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.953539 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.953576 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.953610 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.953629 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.953644 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:43Z","lastTransitionTime":"2025-10-13T21:12:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.958345 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.974184 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:43 crc kubenswrapper[4689]: I1013 21:12:43.992540 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:43Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.017721 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.032405 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.053637 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8
d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.055771 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.055992 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.056047 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.056075 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.056089 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.068771 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.079662 4689 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.101751 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec
2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.115378 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.129068 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.144532 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.160848 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.160911 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.160926 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.160945 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.160956 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.165414 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"ller event handlers\\\\nI1013 21:12:41.846715 6670 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:41.846770 6670 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:12:41.846783 6670 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:12:41.846814 6670 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:12:41.846867 6670 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:41.846852 6670 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:12:41.846887 6670 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:12:41.846905 6670 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:41.846945 6670 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:12:41.846952 6670 factory.go:656] Stopping watch factory\\\\nI1013 21:12:41.846984 6670 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:12:41.847005 6670 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:41.847031 6670 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:41.847108 6670 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.179416 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:44Z is after 2025-08-24T17:21:41Z" Oct 13 
21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.262576 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.262635 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.262647 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.262667 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.262680 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.365481 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.365530 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.365542 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.365563 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.365578 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.468636 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.468698 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.468715 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.468738 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.468757 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.573827 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.573890 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.573909 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.573935 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.573953 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.677026 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.677093 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.677105 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.677127 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.677139 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.779549 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.779642 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.779661 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.779689 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.779714 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.867143 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:44 crc kubenswrapper[4689]: E1013 21:12:44.867353 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.882128 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.882182 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.882196 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.882218 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.882234 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.985422 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.985487 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.985507 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.985532 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:44 crc kubenswrapper[4689]: I1013 21:12:44.985550 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:44Z","lastTransitionTime":"2025-10-13T21:12:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.088182 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.088270 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.088298 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.088348 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.088374 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.191740 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.191783 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.191795 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.191812 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.191824 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.295048 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.295091 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.295110 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.295132 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.295144 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.397996 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.398041 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.398054 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.398071 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.398086 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.501221 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.501272 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.501283 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.501301 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.501314 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.604249 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.604307 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.604323 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.604344 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.604358 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.707104 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.707150 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.707159 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.707173 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.707182 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.810759 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.810826 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.810850 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.810883 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.810908 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.866918 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.867001 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:45 crc kubenswrapper[4689]: E1013 21:12:45.867121 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.866928 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:45 crc kubenswrapper[4689]: E1013 21:12:45.867386 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:45 crc kubenswrapper[4689]: E1013 21:12:45.867307 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.913246 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.913287 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.913295 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.913309 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:45 crc kubenswrapper[4689]: I1013 21:12:45.913318 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:45Z","lastTransitionTime":"2025-10-13T21:12:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.016841 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.016887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.016897 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.016915 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.016929 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:46Z","lastTransitionTime":"2025-10-13T21:12:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.119978 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.120010 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.120018 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.120034 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.120042 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:46Z","lastTransitionTime":"2025-10-13T21:12:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.224087 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.224151 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.224167 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.224188 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.224200 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:46Z","lastTransitionTime":"2025-10-13T21:12:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.331212 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.331270 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.331284 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.331307 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.331324 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:46Z","lastTransitionTime":"2025-10-13T21:12:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.434867 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.434924 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.434941 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.434968 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.434986 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:46Z","lastTransitionTime":"2025-10-13T21:12:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the five-record node-status heartbeat above — four "Recording event message for node" events (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady) followed by the same "Node became not ready" condition — repeats with only the timestamps advancing, at 21:12:46.538, 21:12:46.641, 21:12:46.744, and 21:12:46.847]
Oct 13 21:12:46 crc kubenswrapper[4689]: I1013 21:12:46.867312 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:46 crc kubenswrapper[4689]: E1013 21:12:46.867472 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[node-status heartbeat repeats at 21:12:46.950, 21:12:47.053, 21:12:47.156, 21:12:47.259, and 21:12:47.363]
[node-status heartbeat repeats at 21:12:47.466]
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.483448 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.483567 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.483676 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.483639932 +0000 UTC m=+148.401885017 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.483750 4689 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.483809 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.483840 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.483813136 +0000 UTC m=+148.402058431 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.483936 4689 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.483987 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.48398071 +0000 UTC m=+148.402225795 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
[node-status heartbeat repeats at 21:12:47.570]
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.584734 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.584821 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585011 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585049 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585070 4689 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585067 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585110 4689 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585128 4689 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585155 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.585132841 +0000 UTC m=+148.503377966 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.585186 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.585168702 +0000 UTC m=+148.503413817 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
[node-status heartbeat repeats at 21:12:47.674 and 21:12:47.776]
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.867316 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.867393 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:12:47 crc kubenswrapper[4689]: I1013 21:12:47.867317 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.867642 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.867745 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 21:12:47 crc kubenswrapper[4689]: E1013 21:12:47.867901 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351"
[node-status heartbeat repeats at 21:12:47.880 and 21:12:47.984]
[node-status heartbeat repeats at 21:12:48.087, 21:12:48.190, 21:12:48.297, 21:12:48.401, 21:12:48.504, 21:12:48.608, 21:12:48.712, and 21:12:48.816]
Oct 13 21:12:48 crc kubenswrapper[4689]: I1013 21:12:48.867377 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:48 crc kubenswrapper[4689]: E1013 21:12:48.867631 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[node-status heartbeat repeats at 21:12:48.920 and 21:12:49.023]
[node-status heartbeat repeats at 21:12:49.126, 21:12:49.230, 21:12:49.334, 21:12:49.438, 21:12:49.542, 21:12:49.645, 21:12:49.748, and 21:12:49.853]
Oct 13 21:12:49 crc kubenswrapper[4689]: I1013 21:12:49.867335 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:12:49 crc kubenswrapper[4689]: E1013 21:12:49.867565 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351"
Oct 13 21:12:49 crc kubenswrapper[4689]: I1013 21:12:49.867646 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:12:49 crc kubenswrapper[4689]: E1013 21:12:49.867837 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 21:12:49 crc kubenswrapper[4689]: I1013 21:12:49.867673 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:12:49 crc kubenswrapper[4689]: E1013 21:12:49.867963 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node-status heartbeat repeats at 21:12:49.957]
[node-status heartbeat repeats at 21:12:50.060, 21:12:50.164, 21:12:50.267, 21:12:50.371, 21:12:50.475, 21:12:50.578, 21:12:50.681, and 21:12:50.784]
Oct 13 21:12:50 crc kubenswrapper[4689]: I1013 21:12:50.866865 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:12:50 crc kubenswrapper[4689]: E1013 21:12:50.867103 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[node-status heartbeat repeats at 21:12:50.888, 21:12:50.992, 21:12:51.097, and 21:12:51.201]
Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.305189 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.305295 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.305329 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.305367 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.305392 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.410143 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.410207 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.410224 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.410250 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.410269 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.483970 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.484061 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.484087 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.484121 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.484147 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.508944 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.514933 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.515006 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.515026 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.515063 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.515082 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.535553 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.541507 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.541623 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.541644 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.541674 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.541699 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.565157 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.571148 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.571222 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.571243 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.571276 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.571322 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.587700 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.592487 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.592532 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.592550 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.592574 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.592642 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.607646 4689 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"5dcf8212-d44f-4948-b8a0-e9c7ca68ad65\\\",\\\"systemUUID\\\":\\\"3639096d-e021-4f30-b44c-3e32b233f5a5\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:51Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.607796 4689 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.610045 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.610094 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.610110 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.610132 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.610146 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.714395 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.714474 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.714492 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.714519 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.714538 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.818364 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.818448 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.818470 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.818504 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.818528 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.867772 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.867838 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.867802 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.868040 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.868251 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:51 crc kubenswrapper[4689]: E1013 21:12:51.868477 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.922771 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.922850 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.922871 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.922899 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:51 crc kubenswrapper[4689]: I1013 21:12:51.922920 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:51Z","lastTransitionTime":"2025-10-13T21:12:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.027270 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.027368 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.027390 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.027419 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.027439 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.132697 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.132871 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.132901 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.132970 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.132992 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.235794 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.235849 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.235866 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.235892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.235911 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.339080 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.339146 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.339160 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.339183 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.339200 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.443417 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.443505 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.443526 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.443557 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.443580 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.546887 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.546927 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.546936 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.546951 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.546961 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.650341 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.650419 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.650438 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.650468 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.650489 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.754242 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.754353 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.754373 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.754405 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.754427 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.858164 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.858227 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.858246 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.858271 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.858289 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.866972 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:52 crc kubenswrapper[4689]: E1013 21:12:52.867292 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.960908 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.960973 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.960995 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.961017 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:52 crc kubenswrapper[4689]: I1013 21:12:52.961036 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:52Z","lastTransitionTime":"2025-10-13T21:12:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.065825 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.065881 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.065892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.065913 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.065927 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.170351 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.170425 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.170442 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.170471 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.170491 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.274055 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.274102 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.274114 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.274138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.274152 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.377375 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.377439 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.377455 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.377476 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.377492 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.480727 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.480803 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.480828 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.480869 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.480898 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.584115 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.584187 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.584205 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.584234 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.584253 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.688182 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.688272 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.688290 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.688323 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.688342 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.792646 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.792714 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.792782 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.792814 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.792833 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.866827 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.867052 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.867185 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:53 crc kubenswrapper[4689]: E1013 21:12:53.867713 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:53 crc kubenswrapper[4689]: E1013 21:12:53.867493 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:53 crc kubenswrapper[4689]: E1013 21:12:53.868165 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.886548 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.896086 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.896179 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.896208 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.896257 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.896279 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:53Z","lastTransitionTime":"2025-10-13T21:12:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.901317 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.919864 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccef9c37e02af903206070c97fd613b1475d32f45e355519a1af7598e6e69eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.934285 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vxqkf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d172f3cf-bf0e-4051-8128-9fbaae5e2e70\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3aa0ecf9fb2e3d5177e0d9bb905a371397d99f688a621cb986fe8268ae1eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d6vt7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vxqkf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.948465 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nffnw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5bb2ee-abeb-4342-929a-d61e89f30351\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6l477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nffnw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.963525 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd15308b-c34d-49c6-ae7b-0ec0bfd35ffa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d81c82ccbccf9dedc89e79f02bbb7e9d90dfcc4d8e3a3cab517bed3c59b80c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://907c9f5ffefa885a9eb523a631b79919e7bfc341b2a3b39021be9a075a47b0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.980319 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47f9a9234be8057ea6ecabd7a705e4ee92d47c8f495ea089fdd24d308e6e8501\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b77e7b280c15580b09c90edeefba278002d92801e15f2863c762e202aba9dc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:53 crc kubenswrapper[4689]: I1013 21:12:53.998226 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e491f132b01684f3b7fb0133cdd2f2a0e5ab93ab8c86fee550cfe035746831bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:53Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.000572 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.000628 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.000645 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.000666 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.000681 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.015806 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1863da92-265f-451e-a741-a184c8d3f781\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d37fb8380cb98cb9aa1f70f76ff7f16cccdf6bd8eb7aab714863e258533ce7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m75c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-w5fqm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.039155 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15ff2f20-3071-4cf3-80b2-37e3e36d731b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83af01c592a0f3ab02461885df02a49ab021093ae39b39db0eddd0f919814992\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43a60324e01493373cf987cfb402d540b5ebcb02f16397ccdd17b3de58398488\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23c91abaa080d1d5d2cf60aa766953b8bf27b69c94c8aa13a133e3ac487d68d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f2ba3565600e241e73836a36ba1e0ecc5166d17a655c23607478d2392326caf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a0b3a82399f73e3e9cb1763a2e368c545f44193b433307ec71211c027cabcef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:49Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f69ad95aa26566d5477c8574d80e8f13a89fa6316970089b1ffc86b2cfc322e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b878a07240e9940b21efb01b0b74c182100573f6906248a4a32db2ddbe4da07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mv27l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4zr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 
2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.057000 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vvvxg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12667451-309d-4f81-9d93-ffd3c3299a41\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2725fa80d65bc1765f063a5054f06243be94101772e05f05ae35d5b19c4d323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mztkn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vvvxg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.079411 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6c6f31fd-3600-4a1e-80ed-108d1a6d8d17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d53754fb2202db6286dadc7f34fb96743025928775062e131fd92db55e9f35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://912ff8b1e68ebb3a14a54fef511cb5a3cba28c78bc298320e2fc06d2aee0f1ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b42b257a4e6b240e7971a3d66e1156ac9588a4080aa60a41fe64d51841014ac\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac200139719a0efbca3ea8353bfa9a7b8c35efa97e50539ac4bec1a9f464530\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d296a3c83f6a98333f369e57159d278d65207942443af677a50f504a1a3f8bcd\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"6 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1013 21:11:43.619945 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1013 21:11:43.619954 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1013 21:11:43.619959 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1013 21:11:43.619965 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1013 21:11:43.620092 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1013 21:11:43.633006 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1760389887\\\\\\\\\\\\\\\" (2025-10-13 21:11:26 +0000 UTC to 2025-11-12 21:11:27 +0000 UTC (now=2025-10-13 21:11:43.632954026 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633206 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1760389897\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1760389897\\\\\\\\\\\\\\\" (2025-10-13 20:11:37 +0000 UTC to 2026-10-13 20:11:37 +0000 UTC (now=2025-10-13 21:11:43.633181021 +0000 UTC))\\\\\\\"\\\\nI1013 21:11:43.633233 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1013 21:11:43.633265 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1013 21:11:43.633297 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2506484717/tls.crt::/tmp/serving-cert-2506484717/tls.key\\\\\\\"\\\\nI1013 21:11:43.633665 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1013 21:11:43.635870 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9041c5fdaa9cbc6f8cf3272f6a3946e70cdf0c3dd3be558881dd27c858d8c1cc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7feecb360deee4c988f9092646f48506aa2aa10488d1be540a3cc70d302d96d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.099237 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2000e73e-fdf2-457d-952f-761133c2e80b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8edaf8123ec989e9d670d65e85b5842d4b6dd48d6c71971fab3bdbec53c4bc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c44cb5b77c12c5310a2eb3db1d70765044389d4bf70bc288219c65bff013851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f5a33b1b76942396fa244cc9016f1bf3dadbe4b96383d397a4331d7dc3e8c80e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6344b3d9d460c10255b4472b57dde2c56eed2bae4fd3c3ed10e4847e316115\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.103982 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.104028 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.104039 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.104057 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.104070 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.123197 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.141631 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xr7rr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"632b68ca-d2a4-4570-a0a2-8ea8d204fb59\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:33Z\\\",\\\"message\\\":\\\"2025-10-13T21:11:47+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a\\\\n2025-10-13T21:11:47+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d67bd457-1d6d-4d1c-9433-278646395a6a to /host/opt/cni/bin/\\\\n2025-10-13T21:11:47Z [verbose] multus-daemon started\\\\n2025-10-13T21:11:47Z [verbose] Readiness Indicator file check\\\\n2025-10-13T21:12:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:12:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zr52b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xr7rr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.174104 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5736acbe-9793-447e-9e22-76b0f407bfb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-13T21:12:41Z\\\",\\\"message\\\":\\\"ller event handlers\\\\nI1013 21:12:41.846715 6670 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1013 21:12:41.846770 6670 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1013 21:12:41.846783 6670 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1013 21:12:41.846814 6670 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1013 21:12:41.846867 6670 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1013 21:12:41.846852 6670 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1013 21:12:41.846887 6670 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1013 21:12:41.846905 6670 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1013 21:12:41.846849 6670 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1013 21:12:41.846945 6670 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1013 21:12:41.846952 6670 factory.go:656] Stopping watch factory\\\\nI1013 21:12:41.846984 6670 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1013 21:12:41.847005 6670 ovnkube.go:599] Stopped ovnkube\\\\nI1013 21:12:41.847031 6670 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1013 21:12:41.847108 6670 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-13T21:12:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9qn8m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xml6c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.195233 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"55d53b5e-3ce0-4f24-9f15-770b7c23e8e2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921e69ce8e8831ad4920b14b6e2123c9e8166c6b815a305902563159ea2391de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9e271265faa0e335e5d909e7d90020d3925945f982b3bcc35ed65af665f70b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xgwvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v6xvt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.207323 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.207385 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.207399 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.207421 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.207436 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.221675 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d990a16-ab16-4a1e-9379-b6b94625f0d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e85230712d4c0ca6bb978749dc0baaa3ea5c4913bfea65f51b7b05dfe375684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://539b7a6ad051f8242efe983f7c3888b71f4a155945e8833beedb9c7955ae3c06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a1af5ece538dd953ae6db50243404813b961b6a9aca47206a96330b56e0aed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b3644bdf4729ffaf067239780a4df8bdb850ec2a7f56f4e8c5281d531298575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5a3326d7d1bbcedd89f977a90932165d8c1585e50448eba697cf1a84aebe0f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://490a1fdbfa66049453fc1a1f60b0bdc44ee65218037e302293b8750f9db9c276\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c065a8507d17f490a428a6e37cad5c376b3b2d22e5669b4561dfd540f49bd8aa\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-13T21:11:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65b1b2e1ad21f798d3f87024c0a1795369e8024d76f923f7cc75112453447a77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.239689 4689 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3cfd1e01-432a-479f-a783-6b9e1acfcb16\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:12:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-13T21:11:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04aa23baca3c99a463fd363ffaf50dc400529a0a3d4af30a55b2353eb8e2dd61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82c0a408a02f1929bf23ad55f145eee897886adc62cc66804f18387d36891c85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af51bb8f542b6d1141422c7d41cd987e860d572c385b90d3f38f5f57cbd902c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-13T21:11:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c91f8d5d07cab6bdb1f0af54ae2c3129b3fc77696f6829bb6c693795a07e90de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-13T21:11:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-13T21:11:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-13T21:11:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-13T21:12:54Z is after 2025-08-24T17:21:41Z" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.309953 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.310003 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.310014 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.310038 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.310051 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.413655 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.413742 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.413765 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.413795 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.413816 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.516751 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.516847 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.516869 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.516904 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.516927 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.620505 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.620677 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.620712 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.620748 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.620772 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.723934 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.724025 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.724044 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.724078 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.724102 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.826622 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.826761 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.826818 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.826845 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.826862 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.866727 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:54 crc kubenswrapper[4689]: E1013 21:12:54.867078 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.929103 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.929152 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.929169 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.929193 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:54 crc kubenswrapper[4689]: I1013 21:12:54.929211 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:54Z","lastTransitionTime":"2025-10-13T21:12:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.033120 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.033173 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.033184 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.033202 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.033214 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.135742 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.135818 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.135832 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.135859 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.135899 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.238824 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.238894 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.238919 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.238952 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.238978 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.343019 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.343108 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.343138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.343168 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.343190 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.446730 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.446823 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.446838 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.446858 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.446900 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.549852 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.549902 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.549914 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.549931 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.549944 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.653020 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.653061 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.653075 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.653092 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.653105 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.755724 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.755778 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.755789 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.755805 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.755814 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.859894 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.860017 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.860039 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.860111 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.860134 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.867353 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.867463 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.867468 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:55 crc kubenswrapper[4689]: E1013 21:12:55.867933 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:55 crc kubenswrapper[4689]: E1013 21:12:55.868318 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:55 crc kubenswrapper[4689]: E1013 21:12:55.868489 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.868979 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:12:55 crc kubenswrapper[4689]: E1013 21:12:55.869158 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.962651 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.962707 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.962724 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.962749 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:55 crc kubenswrapper[4689]: I1013 21:12:55.962767 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:55Z","lastTransitionTime":"2025-10-13T21:12:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.066173 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.066230 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.066240 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.066261 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.066275 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.170165 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.170239 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.170261 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.170288 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.170306 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.273550 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.273691 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.273716 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.273741 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.273771 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.377006 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.377072 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.377086 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.377104 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.377114 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.480597 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.480691 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.480709 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.480733 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.480752 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.584143 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.584209 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.584225 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.584249 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.584263 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.686773 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.686844 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.686860 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.686883 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.686897 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.789366 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.789420 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.789432 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.789452 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.789463 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.866940 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:56 crc kubenswrapper[4689]: E1013 21:12:56.867204 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.893644 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.893720 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.893740 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.893764 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.893785 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.997746 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.997858 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.997882 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.998093 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:56 crc kubenswrapper[4689]: I1013 21:12:56.998117 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:56Z","lastTransitionTime":"2025-10-13T21:12:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.101892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.101965 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.101982 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.102012 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.102031 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.205468 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.205529 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.205548 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.205568 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.205589 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.309050 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.309118 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.309138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.309161 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.309179 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.411688 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.411747 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.411763 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.411787 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.411807 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.515218 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.515271 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.515282 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.515325 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.515343 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.618485 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.618640 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.618659 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.618682 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.618705 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.721814 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.721871 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.721886 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.721907 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.721919 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.824783 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.824867 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.824892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.824928 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.824953 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.866649 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:57 crc kubenswrapper[4689]: E1013 21:12:57.866814 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.866816 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.866882 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:57 crc kubenswrapper[4689]: E1013 21:12:57.866926 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:57 crc kubenswrapper[4689]: E1013 21:12:57.866980 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.928566 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.928674 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.928694 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.928724 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:57 crc kubenswrapper[4689]: I1013 21:12:57.928743 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:57Z","lastTransitionTime":"2025-10-13T21:12:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.031446 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.031513 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.031525 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.031547 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.031561 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.134455 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.134521 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.134538 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.134567 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.134658 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.237759 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.237832 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.237853 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.237878 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.237897 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.341813 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.341897 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.341914 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.341950 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.341986 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.445400 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.445463 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.445475 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.445491 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.445503 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.548772 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.548825 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.548839 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.548856 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.548869 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.651075 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.651117 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.651127 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.651144 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.651155 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.753516 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.753581 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.753595 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.753628 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.753643 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.857249 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.857290 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.857301 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.857319 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.857332 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.866871 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:12:58 crc kubenswrapper[4689]: E1013 21:12:58.867219 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.959295 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.959720 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.959948 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.960088 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:58 crc kubenswrapper[4689]: I1013 21:12:58.960218 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:58Z","lastTransitionTime":"2025-10-13T21:12:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.064117 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.064167 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.064176 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.064194 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.064204 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.166833 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.166876 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.166889 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.166908 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.166921 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.269091 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.269165 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.269184 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.269215 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.269235 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.372185 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.372239 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.372249 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.372264 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.372276 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.475529 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.475613 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.475629 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.475654 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.475673 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.578563 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.578639 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.578654 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.578672 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.578684 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.681850 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.681897 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.681927 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.681943 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.681951 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.785755 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.785840 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.785863 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.785896 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.785918 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.866688 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.866763 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.866763 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:12:59 crc kubenswrapper[4689]: E1013 21:12:59.866972 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:12:59 crc kubenswrapper[4689]: E1013 21:12:59.867369 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:12:59 crc kubenswrapper[4689]: E1013 21:12:59.867656 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.890424 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.890504 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.890529 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.890562 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.890587 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.994122 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.994219 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.994251 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.994285 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:12:59 crc kubenswrapper[4689]: I1013 21:12:59.994309 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:12:59Z","lastTransitionTime":"2025-10-13T21:12:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.097043 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.097138 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.097152 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.097175 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.097189 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.199758 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.199821 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.199831 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.199853 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.199865 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.302892 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.302959 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.302979 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.303011 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.303034 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.405566 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.405682 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.405707 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.405738 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.405761 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.508495 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.508640 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.508670 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.508708 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.508737 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.613271 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.613337 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.613356 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.613380 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.613397 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.718139 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.718208 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.718226 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.718250 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.718268 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.822083 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.822161 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.822190 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.822217 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.822238 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.867104 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:00 crc kubenswrapper[4689]: E1013 21:13:00.867298 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.925949 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.926035 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.926061 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.926090 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:00 crc kubenswrapper[4689]: I1013 21:13:00.926113 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:00Z","lastTransitionTime":"2025-10-13T21:13:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.029502 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.029552 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.029565 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.029582 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.029650 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.132284 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.132367 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.132385 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.132411 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.132432 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.235958 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.236022 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.236040 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.236062 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.236076 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.339538 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.339644 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.339668 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.339692 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.339711 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.442891 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.442941 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.442952 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.442971 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.442985 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.546374 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.546451 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.546470 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.546496 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.546514 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.649567 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.649664 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.649681 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.649707 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.649726 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.690653 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.690686 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.690696 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.690713 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.690725 4689 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-13T21:13:01Z","lastTransitionTime":"2025-10-13T21:13:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.764653 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"]
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.765210 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.769293 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.769441 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.769521 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.770924 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.812166 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podStartSLOduration=76.812130264 podStartE2EDuration="1m16.812130264s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:01.810251498 +0000 UTC m=+98.728496633" watchObservedRunningTime="2025-10-13 21:13:01.812130264 +0000 UTC m=+98.730375389"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.834396 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-4zr4r" podStartSLOduration=76.834369464 podStartE2EDuration="1m16.834369464s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:01.834265132 +0000 UTC m=+98.752510267" watchObservedRunningTime="2025-10-13 21:13:01.834369464 +0000 UTC m=+98.752614559"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.862328 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-vvvxg" podStartSLOduration=77.862292894 podStartE2EDuration="1m17.862292894s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:01.856857992 +0000 UTC m=+98.775103087" watchObservedRunningTime="2025-10-13 21:13:01.862292894 +0000 UTC m=+98.780538019"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.866784 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.866817 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.866784 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Oct 13 21:13:01 crc kubenswrapper[4689]: E1013 21:13:01.867192 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 13 21:13:01 crc kubenswrapper[4689]: E1013 21:13:01.867305 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 13 21:13:01 crc kubenswrapper[4689]: E1013 21:13:01.867452 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.889827 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6827a8c7-611c-46c1-97ac-4f9b9a46f722-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.889886 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6827a8c7-611c-46c1-97ac-4f9b9a46f722-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.889920 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6827a8c7-611c-46c1-97ac-4f9b9a46f722-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.890067 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6827a8c7-611c-46c1-97ac-4f9b9a46f722-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.890125 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6827a8c7-611c-46c1-97ac-4f9b9a46f722-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.915401 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=77.915375055 podStartE2EDuration="1m17.915375055s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:01.914904294 +0000 UTC m=+98.833149379" watchObservedRunningTime="2025-10-13 21:13:01.915375055 +0000 UTC m=+98.833620140"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.938624 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=71.938603421 podStartE2EDuration="1m11.938603421s" podCreationTimestamp="2025-10-13 21:11:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:01.936361666 +0000 UTC m=+98.854606751" watchObservedRunningTime="2025-10-13 21:13:01.938603421 +0000 UTC m=+98.856848506"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.973013 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-xr7rr" podStartSLOduration=76.972993127 podStartE2EDuration="1m16.972993127s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:01.972671119 +0000 UTC m=+98.890916204" watchObservedRunningTime="2025-10-13 21:13:01.972993127 +0000 UTC m=+98.891238212"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.991137 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6827a8c7-611c-46c1-97ac-4f9b9a46f722-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.991192 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6827a8c7-611c-46c1-97ac-4f9b9a46f722-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.991254 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6827a8c7-611c-46c1-97ac-4f9b9a46f722-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.991291 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6827a8c7-611c-46c1-97ac-4f9b9a46f722-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.991331 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6827a8c7-611c-46c1-97ac-4f9b9a46f722-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.991387 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6827a8c7-611c-46c1-97ac-4f9b9a46f722-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.991813 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6827a8c7-611c-46c1-97ac-4f9b9a46f722-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.992136 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6827a8c7-611c-46c1-97ac-4f9b9a46f722-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:01 crc kubenswrapper[4689]: I1013 21:13:01.998314 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6827a8c7-611c-46c1-97ac-4f9b9a46f722-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.014952 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v6xvt" podStartSLOduration=77.014928927 podStartE2EDuration="1m17.014928927s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:02.014653701 +0000 UTC m=+98.932898796" watchObservedRunningTime="2025-10-13 21:13:02.014928927 +0000 UTC m=+98.933174012"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.015371 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6827a8c7-611c-46c1-97ac-4f9b9a46f722-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rt492\" (UID: \"6827a8c7-611c-46c1-97ac-4f9b9a46f722\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.068123 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=76.068105901 podStartE2EDuration="1m16.068105901s" podCreationTimestamp="2025-10-13 21:11:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:02.067847215 +0000 UTC m=+98.986092300" watchObservedRunningTime="2025-10-13 21:13:02.068105901 +0000 UTC m=+98.986350986"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.081183 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=50.081164699 podStartE2EDuration="50.081164699s" podCreationTimestamp="2025-10-13 21:12:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:02.080361339 +0000 UTC m=+98.998606424" watchObservedRunningTime="2025-10-13 21:13:02.081164699 +0000 UTC m=+98.999409784"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.093681 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.143720 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-vxqkf" podStartSLOduration=78.14370009 podStartE2EDuration="1m18.14370009s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:02.142623175 +0000 UTC m=+99.060868270" watchObservedRunningTime="2025-10-13 21:13:02.14370009 +0000 UTC m=+99.061945175"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.169589 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=32.1695571 podStartE2EDuration="32.1695571s" podCreationTimestamp="2025-10-13 21:12:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:02.169050007 +0000 UTC m=+99.087295112" watchObservedRunningTime="2025-10-13 21:13:02.1695571 +0000 UTC m=+99.087802185"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.518998 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492" event={"ID":"6827a8c7-611c-46c1-97ac-4f9b9a46f722","Type":"ContainerStarted","Data":"a63cddcb6744b1346c5d9f052341fc44b6fbb68e9fe6124ab1fd80623df497fe"}
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.519051 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492" event={"ID":"6827a8c7-611c-46c1-97ac-4f9b9a46f722","Type":"ContainerStarted","Data":"b5c22cc02e1f22dc622c406e78bd5afa2dd40a284b8b0ba0de3be9f21b165bcf"}
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.535457 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rt492" podStartSLOduration=77.535438281 podStartE2EDuration="1m17.535438281s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:02.534264362 +0000 UTC m=+99.452509447" watchObservedRunningTime="2025-10-13 21:13:02.535438281 +0000 UTC m=+99.453683356"
Oct 13 21:13:02 crc kubenswrapper[4689]: I1013 21:13:02.866837 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:13:02 crc kubenswrapper[4689]: E1013 21:13:02.867023 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 13 21:13:03 crc kubenswrapper[4689]: I1013 21:13:03.866858 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw"
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:03 crc kubenswrapper[4689]: I1013 21:13:03.866966 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:03 crc kubenswrapper[4689]: E1013 21:13:03.868286 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:03 crc kubenswrapper[4689]: E1013 21:13:03.868387 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:03 crc kubenswrapper[4689]: E1013 21:13:03.868327 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:03 crc kubenswrapper[4689]: I1013 21:13:03.912820 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:03 crc kubenswrapper[4689]: E1013 21:13:03.913015 4689 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:13:03 crc kubenswrapper[4689]: E1013 21:13:03.913135 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs podName:3f5bb2ee-abeb-4342-929a-d61e89f30351 nodeName:}" failed. No retries permitted until 2025-10-13 21:14:07.913109507 +0000 UTC m=+164.831354602 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs") pod "network-metrics-daemon-nffnw" (UID: "3f5bb2ee-abeb-4342-929a-d61e89f30351") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 13 21:13:04 crc kubenswrapper[4689]: I1013 21:13:04.867425 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:04 crc kubenswrapper[4689]: E1013 21:13:04.867599 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:05 crc kubenswrapper[4689]: I1013 21:13:05.866893 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:05 crc kubenswrapper[4689]: I1013 21:13:05.866957 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:05 crc kubenswrapper[4689]: I1013 21:13:05.866909 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:05 crc kubenswrapper[4689]: E1013 21:13:05.867134 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:05 crc kubenswrapper[4689]: E1013 21:13:05.867292 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:05 crc kubenswrapper[4689]: E1013 21:13:05.867432 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:06 crc kubenswrapper[4689]: I1013 21:13:06.867185 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:06 crc kubenswrapper[4689]: E1013 21:13:06.867405 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:06 crc kubenswrapper[4689]: I1013 21:13:06.868323 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:13:06 crc kubenswrapper[4689]: E1013 21:13:06.868533 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:13:07 crc kubenswrapper[4689]: I1013 21:13:07.867105 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:07 crc kubenswrapper[4689]: I1013 21:13:07.867187 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:07 crc kubenswrapper[4689]: I1013 21:13:07.868418 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:07 crc kubenswrapper[4689]: E1013 21:13:07.868660 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:07 crc kubenswrapper[4689]: E1013 21:13:07.868948 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:07 crc kubenswrapper[4689]: E1013 21:13:07.869049 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:08 crc kubenswrapper[4689]: I1013 21:13:08.867220 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:08 crc kubenswrapper[4689]: E1013 21:13:08.867736 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:09 crc kubenswrapper[4689]: I1013 21:13:09.866851 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:09 crc kubenswrapper[4689]: I1013 21:13:09.866999 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:09 crc kubenswrapper[4689]: E1013 21:13:09.867041 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:09 crc kubenswrapper[4689]: I1013 21:13:09.867159 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:09 crc kubenswrapper[4689]: E1013 21:13:09.867551 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:09 crc kubenswrapper[4689]: E1013 21:13:09.867811 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:10 crc kubenswrapper[4689]: I1013 21:13:10.867235 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:10 crc kubenswrapper[4689]: E1013 21:13:10.867424 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:11 crc kubenswrapper[4689]: I1013 21:13:11.867672 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:11 crc kubenswrapper[4689]: I1013 21:13:11.867789 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:11 crc kubenswrapper[4689]: I1013 21:13:11.867803 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:11 crc kubenswrapper[4689]: E1013 21:13:11.868383 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:11 crc kubenswrapper[4689]: E1013 21:13:11.868453 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:11 crc kubenswrapper[4689]: E1013 21:13:11.868653 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:12 crc kubenswrapper[4689]: I1013 21:13:12.866820 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:12 crc kubenswrapper[4689]: E1013 21:13:12.866989 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:13 crc kubenswrapper[4689]: I1013 21:13:13.867274 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:13 crc kubenswrapper[4689]: I1013 21:13:13.867349 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:13 crc kubenswrapper[4689]: I1013 21:13:13.867415 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:13 crc kubenswrapper[4689]: E1013 21:13:13.868794 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:13 crc kubenswrapper[4689]: E1013 21:13:13.869080 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:13 crc kubenswrapper[4689]: E1013 21:13:13.869183 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:14 crc kubenswrapper[4689]: I1013 21:13:14.866881 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:14 crc kubenswrapper[4689]: E1013 21:13:14.867144 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:15 crc kubenswrapper[4689]: I1013 21:13:15.866816 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:15 crc kubenswrapper[4689]: I1013 21:13:15.866865 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:15 crc kubenswrapper[4689]: E1013 21:13:15.867032 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:15 crc kubenswrapper[4689]: E1013 21:13:15.867172 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:15 crc kubenswrapper[4689]: I1013 21:13:15.867231 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:15 crc kubenswrapper[4689]: E1013 21:13:15.867334 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:16 crc kubenswrapper[4689]: I1013 21:13:16.867375 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:16 crc kubenswrapper[4689]: E1013 21:13:16.867618 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:17 crc kubenswrapper[4689]: I1013 21:13:17.867214 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:17 crc kubenswrapper[4689]: I1013 21:13:17.867286 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:17 crc kubenswrapper[4689]: E1013 21:13:17.867404 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:17 crc kubenswrapper[4689]: I1013 21:13:17.867217 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:17 crc kubenswrapper[4689]: E1013 21:13:17.867680 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:17 crc kubenswrapper[4689]: E1013 21:13:17.868234 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:17 crc kubenswrapper[4689]: I1013 21:13:17.868487 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:13:17 crc kubenswrapper[4689]: E1013 21:13:17.868678 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xml6c_openshift-ovn-kubernetes(5736acbe-9793-447e-9e22-76b0f407bfb7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" Oct 13 21:13:18 crc kubenswrapper[4689]: I1013 21:13:18.867044 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:18 crc kubenswrapper[4689]: E1013 21:13:18.867276 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.590418 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/1.log" Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.591143 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/0.log" Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.591192 4689 generic.go:334] "Generic (PLEG): container finished" podID="632b68ca-d2a4-4570-a0a2-8ea8d204fb59" containerID="99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99" exitCode=1 Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.591232 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerDied","Data":"99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99"} Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.591274 4689 scope.go:117] "RemoveContainer" containerID="d4e7665c8885b5b9b036d828d6e0de1931afa94385f2958198bab011f6372e44" Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.592478 4689 scope.go:117] "RemoveContainer" containerID="99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99" Oct 13 21:13:19 crc kubenswrapper[4689]: E1013 21:13:19.592871 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-xr7rr_openshift-multus(632b68ca-d2a4-4570-a0a2-8ea8d204fb59)\"" pod="openshift-multus/multus-xr7rr" podUID="632b68ca-d2a4-4570-a0a2-8ea8d204fb59" Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.866897 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.867417 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:19 crc kubenswrapper[4689]: E1013 21:13:19.868081 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:19 crc kubenswrapper[4689]: I1013 21:13:19.867484 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:19 crc kubenswrapper[4689]: E1013 21:13:19.868536 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:19 crc kubenswrapper[4689]: E1013 21:13:19.867715 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:20 crc kubenswrapper[4689]: I1013 21:13:20.597617 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/1.log" Oct 13 21:13:20 crc kubenswrapper[4689]: I1013 21:13:20.866932 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:20 crc kubenswrapper[4689]: E1013 21:13:20.867476 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:21 crc kubenswrapper[4689]: I1013 21:13:21.866641 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:21 crc kubenswrapper[4689]: I1013 21:13:21.866769 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:21 crc kubenswrapper[4689]: E1013 21:13:21.866979 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:21 crc kubenswrapper[4689]: I1013 21:13:21.867096 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:21 crc kubenswrapper[4689]: E1013 21:13:21.867274 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:21 crc kubenswrapper[4689]: E1013 21:13:21.867425 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:22 crc kubenswrapper[4689]: I1013 21:13:22.866672 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:22 crc kubenswrapper[4689]: E1013 21:13:22.866824 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:23 crc kubenswrapper[4689]: I1013 21:13:23.868009 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:23 crc kubenswrapper[4689]: I1013 21:13:23.868042 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:23 crc kubenswrapper[4689]: I1013 21:13:23.868043 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:23 crc kubenswrapper[4689]: E1013 21:13:23.870900 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:23 crc kubenswrapper[4689]: E1013 21:13:23.871074 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:23 crc kubenswrapper[4689]: E1013 21:13:23.871308 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:23 crc kubenswrapper[4689]: E1013 21:13:23.883823 4689 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 13 21:13:23 crc kubenswrapper[4689]: E1013 21:13:23.961995 4689 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 13 21:13:24 crc kubenswrapper[4689]: I1013 21:13:24.866962 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:24 crc kubenswrapper[4689]: E1013 21:13:24.867161 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:25 crc kubenswrapper[4689]: I1013 21:13:25.866803 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:25 crc kubenswrapper[4689]: E1013 21:13:25.867026 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:25 crc kubenswrapper[4689]: I1013 21:13:25.866803 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:25 crc kubenswrapper[4689]: E1013 21:13:25.867481 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:25 crc kubenswrapper[4689]: I1013 21:13:25.868147 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:25 crc kubenswrapper[4689]: E1013 21:13:25.868299 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:26 crc kubenswrapper[4689]: I1013 21:13:26.866794 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:26 crc kubenswrapper[4689]: E1013 21:13:26.866988 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:27 crc kubenswrapper[4689]: I1013 21:13:27.867184 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:27 crc kubenswrapper[4689]: I1013 21:13:27.867222 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:27 crc kubenswrapper[4689]: E1013 21:13:27.868112 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:27 crc kubenswrapper[4689]: E1013 21:13:27.868231 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:27 crc kubenswrapper[4689]: I1013 21:13:27.867364 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:27 crc kubenswrapper[4689]: E1013 21:13:27.868450 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:28 crc kubenswrapper[4689]: I1013 21:13:28.867274 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:28 crc kubenswrapper[4689]: E1013 21:13:28.867559 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:28 crc kubenswrapper[4689]: I1013 21:13:28.869189 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:13:28 crc kubenswrapper[4689]: E1013 21:13:28.963391 4689 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.636409 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/3.log" Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.640512 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerStarted","Data":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.640980 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.761145 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podStartSLOduration=104.761122943 podStartE2EDuration="1m44.761122943s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:29.667832183 +0000 UTC m=+126.586077278" watchObservedRunningTime="2025-10-13 21:13:29.761122943 +0000 UTC m=+126.679368028" Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.762274 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nffnw"] Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.762377 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:29 crc kubenswrapper[4689]: E1013 21:13:29.762487 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.866745 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:29 crc kubenswrapper[4689]: E1013 21:13:29.866883 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:29 crc kubenswrapper[4689]: I1013 21:13:29.867155 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:29 crc kubenswrapper[4689]: E1013 21:13:29.867206 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:30 crc kubenswrapper[4689]: I1013 21:13:30.867075 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:30 crc kubenswrapper[4689]: E1013 21:13:30.867285 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:31 crc kubenswrapper[4689]: I1013 21:13:31.866841 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:31 crc kubenswrapper[4689]: I1013 21:13:31.866980 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:31 crc kubenswrapper[4689]: E1013 21:13:31.867436 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:31 crc kubenswrapper[4689]: I1013 21:13:31.867176 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:31 crc kubenswrapper[4689]: E1013 21:13:31.867688 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:31 crc kubenswrapper[4689]: E1013 21:13:31.867869 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:32 crc kubenswrapper[4689]: I1013 21:13:32.866479 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:32 crc kubenswrapper[4689]: E1013 21:13:32.866630 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:33 crc kubenswrapper[4689]: I1013 21:13:33.866667 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:33 crc kubenswrapper[4689]: I1013 21:13:33.866786 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:33 crc kubenswrapper[4689]: E1013 21:13:33.869359 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:33 crc kubenswrapper[4689]: I1013 21:13:33.869399 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:33 crc kubenswrapper[4689]: E1013 21:13:33.870003 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:33 crc kubenswrapper[4689]: E1013 21:13:33.870029 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:33 crc kubenswrapper[4689]: E1013 21:13:33.964524 4689 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 13 21:13:34 crc kubenswrapper[4689]: I1013 21:13:34.866844 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:34 crc kubenswrapper[4689]: I1013 21:13:34.866960 4689 scope.go:117] "RemoveContainer" containerID="99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99" Oct 13 21:13:34 crc kubenswrapper[4689]: E1013 21:13:34.866999 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:35 crc kubenswrapper[4689]: I1013 21:13:35.664392 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/1.log" Oct 13 21:13:35 crc kubenswrapper[4689]: I1013 21:13:35.664469 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerStarted","Data":"5385df0cedf34a0e0af7240e6b65d843bf09de075d7669b439235c390b995e45"} Oct 13 21:13:35 crc kubenswrapper[4689]: I1013 21:13:35.867370 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:35 crc kubenswrapper[4689]: I1013 21:13:35.867411 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:35 crc kubenswrapper[4689]: E1013 21:13:35.868350 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:35 crc kubenswrapper[4689]: E1013 21:13:35.868405 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:35 crc kubenswrapper[4689]: I1013 21:13:35.867456 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:35 crc kubenswrapper[4689]: E1013 21:13:35.868548 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:36 crc kubenswrapper[4689]: I1013 21:13:36.867211 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:36 crc kubenswrapper[4689]: E1013 21:13:36.867445 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:37 crc kubenswrapper[4689]: I1013 21:13:37.867408 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:37 crc kubenswrapper[4689]: I1013 21:13:37.867437 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:37 crc kubenswrapper[4689]: E1013 21:13:37.867717 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 13 21:13:37 crc kubenswrapper[4689]: I1013 21:13:37.867862 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:37 crc kubenswrapper[4689]: E1013 21:13:37.867914 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 13 21:13:37 crc kubenswrapper[4689]: E1013 21:13:37.868095 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nffnw" podUID="3f5bb2ee-abeb-4342-929a-d61e89f30351" Oct 13 21:13:38 crc kubenswrapper[4689]: I1013 21:13:38.866794 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:38 crc kubenswrapper[4689]: E1013 21:13:38.866955 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 13 21:13:39 crc kubenswrapper[4689]: I1013 21:13:39.866956 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:13:39 crc kubenswrapper[4689]: I1013 21:13:39.867080 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 13 21:13:39 crc kubenswrapper[4689]: I1013 21:13:39.867311 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:39 crc kubenswrapper[4689]: I1013 21:13:39.872323 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 13 21:13:39 crc kubenswrapper[4689]: I1013 21:13:39.872822 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 13 21:13:39 crc kubenswrapper[4689]: I1013 21:13:39.872873 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 13 21:13:39 crc kubenswrapper[4689]: I1013 21:13:39.872877 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 13 21:13:40 crc kubenswrapper[4689]: I1013 21:13:40.866771 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 13 21:13:40 crc kubenswrapper[4689]: I1013 21:13:40.870286 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 13 21:13:40 crc kubenswrapper[4689]: I1013 21:13:40.872516 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.670191 4689 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.758448 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.759228 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.760321 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-j5nvb"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.761295 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.762270 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p7j66"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.762751 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.769606 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7gghw"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.771074 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.772615 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.772999 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.778307 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fdwnr"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.779889 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.780245 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nr7vc"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.780630 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.781293 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.781710 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.782142 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.782531 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.782944 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.783122 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.787700 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.788511 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.799961 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.800084 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.800369 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.802607 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.802932 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.803531 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.803701 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.803924 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.804136 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.804348 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.805114 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.806750 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.807384 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.808162 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.808433 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.808794 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.808823 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.808998 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.810282 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.810659 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.811706 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.812576 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813135 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.814509 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-xsgbv"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.814814 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.815006 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813179 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813292 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813297 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813364 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813378 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813418 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.813691 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.811718 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.815482 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 13 21:13:42 crc 
kubenswrapper[4689]: I1013 21:13:42.815697 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.815856 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.815893 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.815967 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.816166 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.816502 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.818286 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-z5vx4"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.819501 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tckgt"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.819893 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gjkn6"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.820150 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m87tp"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.820504 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.820879 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.821050 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.821074 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.821126 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xsgbv" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.821403 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.822332 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.822650 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.822771 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.822811 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9bdb5"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.823405 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.826041 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.828570 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.828675 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.828759 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.828776 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.832805 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.833263 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.833711 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.834906 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.835659 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.835901 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.836335 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.837450 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.837524 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.837820 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.837867 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.838070 4689 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.838336 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.838682 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.838983 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.839568 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.843404 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.856164 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.855545 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.856332 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.844051 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.844392 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.856688 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.844400 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.846121 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.846118 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.849133 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.849157 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.849389 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.849745 4689 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.850042 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.856049 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.856630 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.858537 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.871851 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.876049 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.876322 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.876414 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.877046 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.877247 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.877395 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-nlljk"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.877944 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.878107 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.878368 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.878406 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mql5t"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.878995 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.879376 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.879578 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.880123 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.882290 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.882563 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.882827 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.883041 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.883776 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qtbn4"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.884423 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.884566 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.883052 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.884873 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.883400 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.883435 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.885917 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.886508 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.886818 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.894185 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.894816 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.895275 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.888031 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.895624 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.888124 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.895715 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.894904 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.896438 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.896992 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z7dn5"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.897387 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.897554 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.898260 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.899548 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.899701 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.899799 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.899890 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.900087 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.900185 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.900260 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.900427 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.901642 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.901710 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902183 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/202442b7-241e-44ee-b24f-0eac63864890-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902219 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-serving-cert\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902245 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902264 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8gm8\" (UniqueName: \"kubernetes.io/projected/9db38791-f017-4d4f-b9e9-08f3ccd38704-kube-api-access-r8gm8\") pod 
\"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902281 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-config\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902309 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v7qk\" (UniqueName: \"kubernetes.io/projected/58d51d91-5495-47c7-a6ed-9a8964688b49-kube-api-access-6v7qk\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902327 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-policies\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902348 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902367 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902398 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-oauth-serving-cert\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902460 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-config\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902500 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p7j66\" (UID: 
\"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902524 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/202442b7-241e-44ee-b24f-0eac63864890-config\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902541 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxpcm\" (UniqueName: \"kubernetes.io/projected/c5168fba-bd94-4b2b-a4d4-886f93ea2988-kube-api-access-mxpcm\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902600 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/72d762df-fe7c-4966-a3fe-62904ca5b93a-metrics-tls\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902618 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5168fba-bd94-4b2b-a4d4-886f93ea2988-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902636 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d51d91-5495-47c7-a6ed-9a8964688b49-serving-cert\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902652 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5168fba-bd94-4b2b-a4d4-886f93ea2988-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902682 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sbkj\" (UniqueName: \"kubernetes.io/projected/72d762df-fe7c-4966-a3fe-62904ca5b93a-kube-api-access-8sbkj\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902708 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mphjw\" (UniqueName: 
\"kubernetes.io/projected/1f429fdd-330f-4526-9fcd-fb6293286256-kube-api-access-mphjw\") pod \"downloads-7954f5f757-xsgbv\" (UID: \"1f429fdd-330f-4526-9fcd-fb6293286256\") " pod="openshift-console/downloads-7954f5f757-xsgbv" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902728 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/187c4133-36b2-4d56-be78-75d555af16a4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902744 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902758 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-oauth-config\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902788 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8rkp\" (UniqueName: \"kubernetes.io/projected/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-kube-api-access-c8rkp\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902808 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902826 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-service-ca\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902845 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-client-ca\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902866 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/187c4133-36b2-4d56-be78-75d555af16a4-proxy-tls\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902885 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-trusted-ca-bundle\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902903 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d762df-fe7c-4966-a3fe-62904ca5b93a-trusted-ca\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902919 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df9q8\" (UniqueName: \"kubernetes.io/projected/0d4c9845-75c1-43df-b20c-2e90d4830d84-kube-api-access-df9q8\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902938 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdfjx\" (UniqueName: \"kubernetes.io/projected/202442b7-241e-44ee-b24f-0eac63864890-kube-api-access-qdfjx\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902953 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skswt\" (UniqueName: \"kubernetes.io/projected/187c4133-36b2-4d56-be78-75d555af16a4-kube-api-access-skswt\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.902971 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903001 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-config\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903017 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/58d51d91-5495-47c7-a6ed-9a8964688b49-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903036 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-serving-cert\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903060 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-config\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903077 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903098 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-client-ca\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903114 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903133 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-service-ca-bundle\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903153 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903171 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-serving-cert\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903188 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn6ht\" (UniqueName: \"kubernetes.io/projected/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-kube-api-access-nn6ht\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903216 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/202442b7-241e-44ee-b24f-0eac63864890-images\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903232 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903250 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nppkc\" (UniqueName: \"kubernetes.io/projected/a316203a-ecb5-465b-a364-c5b517f31dee-kube-api-access-nppkc\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903268 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72d762df-fe7c-4966-a3fe-62904ca5b93a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903285 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903303 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a316203a-ecb5-465b-a364-c5b517f31dee-serving-cert\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 
21:13:42.903321 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-dir\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.903339 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.904653 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.916543 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.920510 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.927552 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.927865 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.929153 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.929674 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.929794 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.938145 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.938342 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6sjk2"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.938649 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.941230 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-phmxg"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.944833 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.945624 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.947457 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z5vx4"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.947489 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.947503 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7gghw"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.947522 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p7j66"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.947536 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-j5nvb"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.947647 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.948346 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.952434 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.954529 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nr7vc"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.971004 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.971323 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fdwnr"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.971862 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.979856 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9bdb5"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.980130 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.981671 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xsgbv"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.982248 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.993125 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.993494 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.996310 4689 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gjkn6"] Oct 13 21:13:42 crc kubenswrapper[4689]: I1013 21:13:42.999648 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.000641 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.001609 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.002569 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.003023 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.004266 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mql5t"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.004514 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/58d51d91-5495-47c7-a6ed-9a8964688b49-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.004620 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-serving-cert\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.004693 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-config\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.004775 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.004850 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-service-ca-bundle\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 
21:13:43.004921 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-client-ca\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.004993 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.005064 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.007482 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-serving-cert\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.007624 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn6ht\" (UniqueName: \"kubernetes.io/projected/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-kube-api-access-nn6ht\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.007737 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/202442b7-241e-44ee-b24f-0eac63864890-images\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008055 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008161 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nppkc\" (UniqueName: \"kubernetes.io/projected/a316203a-ecb5-465b-a364-c5b517f31dee-kube-api-access-nppkc\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008345 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72d762df-fe7c-4966-a3fe-62904ca5b93a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008810 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008910 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008989 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a316203a-ecb5-465b-a364-c5b517f31dee-serving-cert\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.009087 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-dir\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.009163 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-serving-cert\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.009238 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/202442b7-241e-44ee-b24f-0eac63864890-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.009312 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.009427 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8gm8\" (UniqueName: 
\"kubernetes.io/projected/9db38791-f017-4d4f-b9e9-08f3ccd38704-kube-api-access-r8gm8\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.009924 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-config\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010316 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v7qk\" (UniqueName: \"kubernetes.io/projected/58d51d91-5495-47c7-a6ed-9a8964688b49-kube-api-access-6v7qk\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010359 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-dir\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010366 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-policies\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.007543 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/58d51d91-5495-47c7-a6ed-9a8964688b49-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010409 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010452 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010474 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-oauth-serving-cert\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " 
pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010505 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010522 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-config\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010568 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/202442b7-241e-44ee-b24f-0eac63864890-config\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010632 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxpcm\" (UniqueName: \"kubernetes.io/projected/c5168fba-bd94-4b2b-a4d4-886f93ea2988-kube-api-access-mxpcm\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010657 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/72d762df-fe7c-4966-a3fe-62904ca5b93a-metrics-tls\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010673 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5168fba-bd94-4b2b-a4d4-886f93ea2988-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010704 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d51d91-5495-47c7-a6ed-9a8964688b49-serving-cert\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010720 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5168fba-bd94-4b2b-a4d4-886f93ea2988-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:43 crc 
kubenswrapper[4689]: I1013 21:13:43.010744 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sbkj\" (UniqueName: \"kubernetes.io/projected/72d762df-fe7c-4966-a3fe-62904ca5b93a-kube-api-access-8sbkj\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010801 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mphjw\" (UniqueName: \"kubernetes.io/projected/1f429fdd-330f-4526-9fcd-fb6293286256-kube-api-access-mphjw\") pod \"downloads-7954f5f757-xsgbv\" (UID: \"1f429fdd-330f-4526-9fcd-fb6293286256\") " pod="openshift-console/downloads-7954f5f757-xsgbv" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010826 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/187c4133-36b2-4d56-be78-75d555af16a4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010847 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010865 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-oauth-config\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010882 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8rkp\" (UniqueName: \"kubernetes.io/projected/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-kube-api-access-c8rkp\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010902 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010919 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-service-ca\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010941 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-client-ca\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010970 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/187c4133-36b2-4d56-be78-75d555af16a4-proxy-tls\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010999 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-trusted-ca-bundle\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.011005 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.011025 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d762df-fe7c-4966-a3fe-62904ca5b93a-trusted-ca\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.011117 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df9q8\" (UniqueName: \"kubernetes.io/projected/0d4c9845-75c1-43df-b20c-2e90d4830d84-kube-api-access-df9q8\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.011139 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.011158 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdfjx\" (UniqueName: \"kubernetes.io/projected/202442b7-241e-44ee-b24f-0eac63864890-kube-api-access-qdfjx\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.011177 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skswt\" (UniqueName: \"kubernetes.io/projected/187c4133-36b2-4d56-be78-75d555af16a4-kube-api-access-skswt\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: 
\"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.011212 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-config\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.010317 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008638 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/202442b7-241e-44ee-b24f-0eac63864890-images\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008241 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.013166 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-fw54p"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.013744 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/202442b7-241e-44ee-b24f-0eac63864890-config\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.013194 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.014035 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-config\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.014557 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.012761 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-policies\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.014944 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-config\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.008063 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.016177 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-trusted-ca-bundle\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.007190 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-service-ca-bundle\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.016923 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-client-ca\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.017707 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.018224 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-oauth-serving-cert\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.018514 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.018715 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/202442b7-241e-44ee-b24f-0eac63864890-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.018879 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.018915 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.019169 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5168fba-bd94-4b2b-a4d4-886f93ea2988-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.019495 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-config\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.006965 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-client-ca\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.019966 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/187c4133-36b2-4d56-be78-75d555af16a4-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.020264 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.021568 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7gghw\" 
(UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.021791 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m87tp"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.022919 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.023789 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.024039 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.024868 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a316203a-ecb5-465b-a364-c5b517f31dee-config\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.025193 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c5168fba-bd94-4b2b-a4d4-886f93ea2988-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.025780 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.027142 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a316203a-ecb5-465b-a364-c5b517f31dee-serving-cert\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.027416 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d51d91-5495-47c7-a6ed-9a8964688b49-serving-cert\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.027532 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-serving-cert\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.027571 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-oauth-config\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.027652 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.027682 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-serving-cert\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.027708 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.028014 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.029153 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-lbpft"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.031841 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.032026 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.033329 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.033607 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-serving-cert\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.033730 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.036902 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qtbn4"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.037078 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/187c4133-36b2-4d56-be78-75d555af16a4-proxy-tls\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.038257 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tckgt"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.040442 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.042134 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.042253 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-fw54p"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.044048 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6sjk2"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.047730 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-lbpft"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.048279 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.049733 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.051509 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z7dn5"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.053622 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj"] Oct 13 21:13:43 crc 
kubenswrapper[4689]: I1013 21:13:43.055649 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-5bzn7"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.056629 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.057677 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d762df-fe7c-4966-a3fe-62904ca5b93a-trusted-ca\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.058721 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5bzn7"] Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.061085 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.066216 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/72d762df-fe7c-4966-a3fe-62904ca5b93a-metrics-tls\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.081521 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.101648 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.124821 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.141710 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.150652 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-service-ca\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.162696 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.182210 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.201494 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.220799 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.241288 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.301556 4689 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.321941 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.342530 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.361742 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.381543 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.409689 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.422694 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.441852 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.461662 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.482383 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.502078 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.523540 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.542185 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.562329 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.583709 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.601889 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.622140 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.641936 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.663379 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: 
I1013 21:13:43.681802 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.704363 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.723420 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.742481 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.761995 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.782642 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.802781 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.822431 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.841942 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.861609 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.882947 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.900082 4689 request.go:700] Waited for 1.006087143s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/secrets?fieldSelector=metadata.name%3Dkube-controller-manager-operator-dockercfg-gkqpw&limit=500&resourceVersion=0 Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.902843 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.922708 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.941849 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.961654 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 13 21:13:43 crc kubenswrapper[4689]: I1013 21:13:43.982183 4689 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.008088 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.022624 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.042826 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.062431 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.082071 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.103258 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.121706 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.143944 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.163036 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.182848 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.201917 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.222358 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.243057 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.262764 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.282277 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.302060 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.322187 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.342504 4689 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.362680 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.382194 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.401422 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.422458 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.442205 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.462797 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.482371 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.502665 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.522778 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.542011 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.562182 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.583730 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.602692 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.646888 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn6ht\" (UniqueName: \"kubernetes.io/projected/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-kube-api-access-nn6ht\") pod \"route-controller-manager-6576b87f9c-82j7d\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.672946 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nppkc\" (UniqueName: \"kubernetes.io/projected/a316203a-ecb5-465b-a364-c5b517f31dee-kube-api-access-nppkc\") pod \"authentication-operator-69f744f599-gjkn6\" (UID: \"a316203a-ecb5-465b-a364-c5b517f31dee\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.693807 4689 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72d762df-fe7c-4966-a3fe-62904ca5b93a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.721190 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8gm8\" (UniqueName: \"kubernetes.io/projected/9db38791-f017-4d4f-b9e9-08f3ccd38704-kube-api-access-r8gm8\") pod \"oauth-openshift-558db77b4-7gghw\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.733213 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v7qk\" (UniqueName: \"kubernetes.io/projected/58d51d91-5495-47c7-a6ed-9a8964688b49-kube-api-access-6v7qk\") pod \"openshift-config-operator-7777fb866f-tckgt\" (UID: \"58d51d91-5495-47c7-a6ed-9a8964688b49\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.743002 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.746338 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxpcm\" (UniqueName: \"kubernetes.io/projected/c5168fba-bd94-4b2b-a4d4-886f93ea2988-kube-api-access-mxpcm\") pod \"openshift-controller-manager-operator-756b6f6bc6-ptbs8\" (UID: \"c5168fba-bd94-4b2b-a4d4-886f93ea2988\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.774541 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.783419 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.786200 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df9q8\" (UniqueName: \"kubernetes.io/projected/0d4c9845-75c1-43df-b20c-2e90d4830d84-kube-api-access-df9q8\") pod \"console-f9d7485db-z5vx4\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.802360 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.839492 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8rkp\" (UniqueName: \"kubernetes.io/projected/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-kube-api-access-c8rkp\") pod \"controller-manager-879f6c89f-p7j66\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.857359 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mphjw\" (UniqueName: \"kubernetes.io/projected/1f429fdd-330f-4526-9fcd-fb6293286256-kube-api-access-mphjw\") pod \"downloads-7954f5f757-xsgbv\" (UID: \"1f429fdd-330f-4526-9fcd-fb6293286256\") " pod="openshift-console/downloads-7954f5f757-xsgbv" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.858016 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.865705 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-xsgbv" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.881955 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.883021 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sbkj\" (UniqueName: \"kubernetes.io/projected/72d762df-fe7c-4966-a3fe-62904ca5b93a-kube-api-access-8sbkj\") pod \"ingress-operator-5b745b69d9-4zcht\" (UID: \"72d762df-fe7c-4966-a3fe-62904ca5b93a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.888650 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.892997 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.900368 4689 request.go:700] Waited for 1.880573532s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/serviceaccounts/machine-config-controller/token Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.904200 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdfjx\" (UniqueName: \"kubernetes.io/projected/202442b7-241e-44ee-b24f-0eac63864890-kube-api-access-qdfjx\") pod \"machine-api-operator-5694c8668f-fdwnr\" (UID: \"202442b7-241e-44ee-b24f-0eac63864890\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.920420 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skswt\" (UniqueName: \"kubernetes.io/projected/187c4133-36b2-4d56-be78-75d555af16a4-kube-api-access-skswt\") pod \"machine-config-controller-84d6567774-5wbmg\" (UID: \"187c4133-36b2-4d56-be78-75d555af16a4\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.923086 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.923283 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.942911 4689 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.954678 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.957069 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.963046 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.983379 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 13 21:13:44 crc kubenswrapper[4689]: I1013 21:13:44.985358 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.001988 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.019851 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.021566 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.041994 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139693 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139738 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-registry-tls\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139761 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-registry-certificates\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139779 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-ca\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139797 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-client\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139814 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba56493b-6d33-4bee-bbb2-a431a7622cdb-config\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139836 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ba56493b-6d33-4bee-bbb2-a431a7622cdb-machine-approver-tls\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139864 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2d2l\" (UniqueName: \"kubernetes.io/projected/89f4bdbb-1a86-42b4-8067-7be1209a54cc-kube-api-access-k2d2l\") pod \"cluster-samples-operator-665b6dd947-cm8l7\" (UID: \"89f4bdbb-1a86-42b4-8067-7be1209a54cc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139884 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139900 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlkkz\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-kube-api-access-rlkkz\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139923 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-node-pullsecrets\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139941 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-etcd-serving-ca\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139956 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-service-ca\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.139983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e86a349-8bfc-4667-8213-a034dc8c1a7b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140001 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-image-import-ca\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140018 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e86a349-8bfc-4667-8213-a034dc8c1a7b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140036 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-audit-policies\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140051 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-bound-sa-token\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140069 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-serving-cert\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140095 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e86a349-8bfc-4667-8213-a034dc8c1a7b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140117 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140136 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140156 4689 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/89f4bdbb-1a86-42b4-8067-7be1209a54cc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-cm8l7\" (UID: \"89f4bdbb-1a86-42b4-8067-7be1209a54cc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140179 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk88l\" (UniqueName: \"kubernetes.io/projected/ba56493b-6d33-4bee-bbb2-a431a7622cdb-kube-api-access-zk88l\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140196 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cglhk\" (UniqueName: \"kubernetes.io/projected/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-kube-api-access-cglhk\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140212 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-encryption-config\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140231 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k46pj\" (UniqueName: \"kubernetes.io/projected/110831e6-2732-4fe8-8f36-3505ff66495f-kube-api-access-k46pj\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140263 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c78b93f-8347-4c41-a948-bacab534efdf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140278 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-config\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140296 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-serving-cert\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140332 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ba56493b-6d33-4bee-bbb2-a431a7622cdb-auth-proxy-config\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140348 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-config\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140392 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wg85\" (UniqueName: \"kubernetes.io/projected/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-kube-api-access-6wg85\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140411 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fdfc234-e5da-413a-89e5-226f99fe29af-config\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140438 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42104006-5aa2-4e76-9a90-5402fb280c09-serving-cert\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140453 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-serving-cert\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140468 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-audit-dir\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140483 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-trusted-ca\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140499 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-etcd-serving-ca\") 
pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140518 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-config\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140533 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l2xt\" (UniqueName: \"kubernetes.io/projected/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-kube-api-access-8l2xt\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140550 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c78b93f-8347-4c41-a948-bacab534efdf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140573 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140603 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/110831e6-2732-4fe8-8f36-3505ff66495f-audit-dir\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140625 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-encryption-config\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140643 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-trusted-ca\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140658 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-etcd-client\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140681 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fdfc234-e5da-413a-89e5-226f99fe29af-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140698 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqcqv\" (UniqueName: \"kubernetes.io/projected/42104006-5aa2-4e76-9a90-5402fb280c09-kube-api-access-qqcqv\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140715 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-audit\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140732 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgp8l\" (UniqueName: \"kubernetes.io/projected/7fdfc234-e5da-413a-89e5-226f99fe29af-kube-api-access-wgp8l\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140756 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-etcd-client\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.140771 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.141677 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:45.641660608 +0000 UTC m=+142.559905693 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.180506 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gjkn6"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.203557 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.242033 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.242547 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:45.742521765 +0000 UTC m=+142.660766850 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.242975 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2d2l\" (UniqueName: \"kubernetes.io/projected/89f4bdbb-1a86-42b4-8067-7be1209a54cc-kube-api-access-k2d2l\") pod \"cluster-samples-operator-665b6dd947-cm8l7\" (UID: \"89f4bdbb-1a86-42b4-8067-7be1209a54cc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243003 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243025 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-mountpoint-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243042 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-service-ca\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243059 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e86a349-8bfc-4667-8213-a034dc8c1a7b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243076 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74959b1a-d8f4-415f-a901-4c5c162c41c9-serving-cert\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243092 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69af70ca-2b63-4a93-86de-d9e5f44dffb3-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243109 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-bound-sa-token\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243125 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e86a349-8bfc-4667-8213-a034dc8c1a7b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243143 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-audit-policies\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243278 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-plugins-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243860 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nmkts\" (UID: \"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243910 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69b8063f-9b07-404d-91d8-e143fa0aa521-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243951 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243977 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a557b09e-dd8c-4ad4-8a24-d817c186d537-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qtbn4\" (UID: \"a557b09e-dd8c-4ad4-8a24-d817c186d537\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.243997 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7fzb\" (UniqueName: \"kubernetes.io/projected/5a4ca39a-9c29-4904-aa12-cca8aff1453a-kube-api-access-j7fzb\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.244017 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2d213635-9d7d-4d87-9b11-6744bbac9824-metrics-tls\") pod \"dns-operator-744455d44c-6sjk2\" (UID: \"2d213635-9d7d-4d87-9b11-6744bbac9824\") " pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.244039 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cxtm\" (UniqueName: \"kubernetes.io/projected/4442f39b-048e-493e-86b8-256c5c39dca5-kube-api-access-2cxtm\") pod \"package-server-manager-789f6589d5-2kfvc\" (UID: \"4442f39b-048e-493e-86b8-256c5c39dca5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.244075 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/89f4bdbb-1a86-42b4-8067-7be1209a54cc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-cm8l7\" (UID: \"89f4bdbb-1a86-42b4-8067-7be1209a54cc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.244095 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-zk88l\" (UniqueName: \"kubernetes.io/projected/ba56493b-6d33-4bee-bbb2-a431a7622cdb-kube-api-access-zk88l\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.246102 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.246360 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:45.746338709 +0000 UTC m=+142.664583794 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.246376 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e86a349-8bfc-4667-8213-a034dc8c1a7b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.246477 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-audit-policies\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.246730 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-service-ca\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.250944 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/89f4bdbb-1a86-42b4-8067-7be1209a54cc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-cm8l7\" (UID: \"89f4bdbb-1a86-42b4-8067-7be1209a54cc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.253994 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z5vx4"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254706 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k46pj\" (UniqueName: 
\"kubernetes.io/projected/110831e6-2732-4fe8-8f36-3505ff66495f-kube-api-access-k46pj\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254799 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/24d70a59-b89a-4f25-8033-38d879cfe981-images\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254824 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3220bc21-2d0b-42ac-8970-ce9e0d52d896-srv-cert\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254844 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-secret-volume\") pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254875 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-config\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254904 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnvj5\" (UniqueName: \"kubernetes.io/projected/89108982-aec6-4c39-a675-508c22d2bf80-kube-api-access-vnvj5\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254929 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-default-certificate\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254957 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wg85\" (UniqueName: \"kubernetes.io/projected/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-kube-api-access-6wg85\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.254982 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42104006-5aa2-4e76-9a90-5402fb280c09-serving-cert\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.255006 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-serving-cert\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.255031 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.255053 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89108982-aec6-4c39-a675-508c22d2bf80-service-ca-bundle\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.255096 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8tzj\" (UniqueName: \"kubernetes.io/projected/d3229a49-3424-464a-b479-460fb0a21620-kube-api-access-h8tzj\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.255122 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-config\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.255144 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c78b93f-8347-4c41-a948-bacab534efdf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.255182 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3220bc21-2d0b-42ac-8970-ce9e0d52d896-profile-collector-cert\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256367 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69b8063f-9b07-404d-91d8-e143fa0aa521-srv-cert\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256424 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99ddc02a-f527-40a5-a6dd-330e712e955d-config-volume\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256477 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fdfc234-e5da-413a-89e5-226f99fe29af-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256500 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-stats-auth\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256538 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-audit\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256564 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/958def5c-022a-4947-b863-499dad6954dc-signing-key\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256600 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-node-bootstrap-token\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256623 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-registry-certificates\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256646 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256666 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pxhk\" (UniqueName: \"kubernetes.io/projected/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-kube-api-access-6pxhk\") pod 
\"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256686 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-registry-tls\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256705 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-client\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256726 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhwjl\" (UniqueName: \"kubernetes.io/projected/74959b1a-d8f4-415f-a901-4c5c162c41c9-kube-api-access-fhwjl\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256763 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/69af70ca-2b63-4a93-86de-d9e5f44dffb3-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256787 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-csi-data-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256811 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/958def5c-022a-4947-b863-499dad6954dc-signing-cabundle\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256832 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k7zw\" (UniqueName: \"kubernetes.io/projected/958def5c-022a-4947-b863-499dad6954dc-kube-api-access-8k7zw\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256870 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlkkz\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-kube-api-access-rlkkz\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256892 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-node-pullsecrets\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256918 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-etcd-serving-ca\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.256984 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-image-import-ca\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257009 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299e04b4-ceeb-4acb-bd40-516a2eef1486-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257035 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzsv9\" (UniqueName: \"kubernetes.io/projected/adcf30c3-b406-4735-8488-f8380a7ab1e8-kube-api-access-pzsv9\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257065 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/99ddc02a-f527-40a5-a6dd-330e712e955d-metrics-tls\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257100 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-serving-cert\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257140 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqzl4\" (UniqueName: \"kubernetes.io/projected/a557b09e-dd8c-4ad4-8a24-d817c186d537-kube-api-access-rqzl4\") pod \"multus-admission-controller-857f4d67dd-qtbn4\" (UID: \"a557b09e-dd8c-4ad4-8a24-d817c186d537\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257172 4689 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e86a349-8bfc-4667-8213-a034dc8c1a7b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257196 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69af70ca-2b63-4a93-86de-d9e5f44dffb3-config\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257221 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvq4x\" (UniqueName: \"kubernetes.io/projected/69b8063f-9b07-404d-91d8-e143fa0aa521-kube-api-access-rvq4x\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257257 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/adcf30c3-b406-4735-8488-f8380a7ab1e8-webhook-cert\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257280 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4442f39b-048e-493e-86b8-256c5c39dca5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2kfvc\" (UID: \"4442f39b-048e-493e-86b8-256c5c39dca5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257308 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257329 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d3229a49-3424-464a-b479-460fb0a21620-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257353 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgc9r\" (UniqueName: \"kubernetes.io/projected/3220bc21-2d0b-42ac-8970-ce9e0d52d896-kube-api-access-mgc9r\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257371 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzgqs\" (UniqueName: \"kubernetes.io/projected/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-kube-api-access-rzgqs\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257391 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7efd4908-44a4-4b78-84cc-46c7578ee5a7-cert\") pod \"ingress-canary-5bzn7\" (UID: \"7efd4908-44a4-4b78-84cc-46c7578ee5a7\") " pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257429 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cglhk\" (UniqueName: \"kubernetes.io/projected/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-kube-api-access-cglhk\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257451 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-encryption-config\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257469 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-config-volume\") pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257537 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c78b93f-8347-4c41-a948-bacab534efdf-ca-trust-extracted\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257559 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-config\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257612 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hmmw\" (UniqueName: \"kubernetes.io/projected/299e04b4-ceeb-4acb-bd40-516a2eef1486-kube-api-access-9hmmw\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc 
kubenswrapper[4689]: I1013 21:13:45.257654 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ba56493b-6d33-4bee-bbb2-a431a7622cdb-auth-proxy-config\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257676 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-serving-cert\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257700 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fdfc234-e5da-413a-89e5-226f99fe29af-config\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257726 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-trusted-ca\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257747 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-audit-dir\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257766 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn9zb\" (UniqueName: \"kubernetes.io/projected/7efd4908-44a4-4b78-84cc-46c7578ee5a7-kube-api-access-pn9zb\") pod \"ingress-canary-5bzn7\" (UID: \"7efd4908-44a4-4b78-84cc-46c7578ee5a7\") " pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257786 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nprzx\" (UniqueName: \"kubernetes.io/projected/24d70a59-b89a-4f25-8033-38d879cfe981-kube-api-access-nprzx\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257828 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3229a49-3424-464a-b479-460fb0a21620-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257847 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-certs\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257869 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l2xt\" (UniqueName: \"kubernetes.io/projected/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-kube-api-access-8l2xt\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257896 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257916 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn44g\" (UniqueName: \"kubernetes.io/projected/99ddc02a-f527-40a5-a6dd-330e712e955d-kube-api-access-zn44g\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257939 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/110831e6-2732-4fe8-8f36-3505ff66495f-audit-dir\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257959 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/adcf30c3-b406-4735-8488-f8380a7ab1e8-tmpfs\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.257994 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-encryption-config\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.258015 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/299e04b4-ceeb-4acb-bd40-516a2eef1486-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.258036 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8306e09d-864e-4342-adc2-e56288612d89-config\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: 
\"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.258108 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-socket-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.258565 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-config\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.259145 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/110831e6-2732-4fe8-8f36-3505ff66495f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.259251 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-trusted-ca\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.259304 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-etcd-client\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.259943 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-etcd-serving-ca\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.260132 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-audit\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.260155 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-node-pullsecrets\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.260230 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c78b93f-8347-4c41-a948-bacab534efdf-ca-trust-extracted\") pod 
\"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.260559 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-config\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.261032 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-config\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.261420 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/110831e6-2732-4fe8-8f36-3505ff66495f-audit-dir\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.261646 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-registry-certificates\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.263196 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ba56493b-6d33-4bee-bbb2-a431a7622cdb-auth-proxy-config\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.264457 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fdfc234-e5da-413a-89e5-226f99fe29af-config\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.264486 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fdfc234-e5da-413a-89e5-226f99fe29af-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.264536 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-registration-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.266665 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-trusted-ca\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.269839 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-etcd-client\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.269942 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-encryption-config\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.271071 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-serving-cert\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.271079 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42104006-5aa2-4e76-9a90-5402fb280c09-serving-cert\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.271356 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-audit-dir\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.271373 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e86a349-8bfc-4667-8213-a034dc8c1a7b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.271566 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-image-import-ca\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.272345 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.272495 4689 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.272565 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.272959 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c78b93f-8347-4c41-a948-bacab534efdf-installation-pull-secrets\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.273004 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-encryption-config\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.273362 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/110831e6-2732-4fe8-8f36-3505ff66495f-serving-cert\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.274125 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-trusted-ca\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.275577 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqcqv\" (UniqueName: \"kubernetes.io/projected/42104006-5aa2-4e76-9a90-5402fb280c09-kube-api-access-qqcqv\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.275672 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8306e09d-864e-4342-adc2-e56288612d89-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: \"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.275749 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgp8l\" (UniqueName: \"kubernetes.io/projected/7fdfc234-e5da-413a-89e5-226f99fe29af-kube-api-access-wgp8l\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 
21:13:45.275861 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-etcd-client\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276217 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktkb4\" (UniqueName: \"kubernetes.io/projected/2d213635-9d7d-4d87-9b11-6744bbac9824-kube-api-access-ktkb4\") pod \"dns-operator-744455d44c-6sjk2\" (UID: \"2d213635-9d7d-4d87-9b11-6744bbac9824\") " pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276256 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74959b1a-d8f4-415f-a901-4c5c162c41c9-config\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276339 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtwpc\" (UniqueName: \"kubernetes.io/projected/1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54-kube-api-access-dtwpc\") pod \"control-plane-machine-set-operator-78cbb6b69f-nmkts\" (UID: \"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276367 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276408 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-metrics-certs\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276435 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb92f\" (UniqueName: \"kubernetes.io/projected/79816a71-299c-4111-a2e5-930f6b05710e-kube-api-access-gb92f\") pod \"migrator-59844c95c7-dsbhf\" (UID: \"79816a71-299c-4111-a2e5-930f6b05710e\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276458 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-ca\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276478 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ba56493b-6d33-4bee-bbb2-a431a7622cdb-config\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276538 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/adcf30c3-b406-4735-8488-f8380a7ab1e8-apiservice-cert\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276565 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/24d70a59-b89a-4f25-8033-38d879cfe981-proxy-tls\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276611 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8306e09d-864e-4342-adc2-e56288612d89-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: \"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276808 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ba56493b-6d33-4bee-bbb2-a431a7622cdb-machine-approver-tls\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.276957 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/24d70a59-b89a-4f25-8033-38d879cfe981-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.277134 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba56493b-6d33-4bee-bbb2-a431a7622cdb-config\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.277679 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-ca\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.277969 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/42104006-5aa2-4e76-9a90-5402fb280c09-etcd-client\") pod \"etcd-operator-b45778765-9bdb5\" (UID: 
\"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.278662 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-serving-cert\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.279118 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-registry-tls\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.280091 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.282993 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ba56493b-6d33-4bee-bbb2-a431a7622cdb-machine-approver-tls\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.287447 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-etcd-client\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.292475 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2d2l\" (UniqueName: \"kubernetes.io/projected/89f4bdbb-1a86-42b4-8067-7be1209a54cc-kube-api-access-k2d2l\") pod \"cluster-samples-operator-665b6dd947-cm8l7\" (UID: \"89f4bdbb-1a86-42b4-8067-7be1209a54cc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.307275 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-bound-sa-token\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.331061 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e86a349-8bfc-4667-8213-a034dc8c1a7b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-njhdl\" (UID: \"5e86a349-8bfc-4667-8213-a034dc8c1a7b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.337861 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-xsgbv"] Oct 13 
21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.359553 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk88l\" (UniqueName: \"kubernetes.io/projected/ba56493b-6d33-4bee-bbb2-a431a7622cdb-kube-api-access-zk88l\") pod \"machine-approver-56656f9798-skz8g\" (UID: \"ba56493b-6d33-4bee-bbb2-a431a7622cdb\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.369302 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k46pj\" (UniqueName: \"kubernetes.io/projected/110831e6-2732-4fe8-8f36-3505ff66495f-kube-api-access-k46pj\") pod \"apiserver-7bbb656c7d-mvrs7\" (UID: \"110831e6-2732-4fe8-8f36-3505ff66495f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.373934 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.376688 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378642 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378834 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3220bc21-2d0b-42ac-8970-ce9e0d52d896-profile-collector-cert\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378861 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69b8063f-9b07-404d-91d8-e143fa0aa521-srv-cert\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378891 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99ddc02a-f527-40a5-a6dd-330e712e955d-config-volume\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378912 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-stats-auth\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378930 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/958def5c-022a-4947-b863-499dad6954dc-signing-key\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: 
\"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378949 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-node-bootstrap-token\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378977 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pxhk\" (UniqueName: \"kubernetes.io/projected/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-kube-api-access-6pxhk\") pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.378996 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhwjl\" (UniqueName: \"kubernetes.io/projected/74959b1a-d8f4-415f-a901-4c5c162c41c9-kube-api-access-fhwjl\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379011 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/958def5c-022a-4947-b863-499dad6954dc-signing-cabundle\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379025 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k7zw\" (UniqueName: \"kubernetes.io/projected/958def5c-022a-4947-b863-499dad6954dc-kube-api-access-8k7zw\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379052 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/69af70ca-2b63-4a93-86de-d9e5f44dffb3-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379069 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-csi-data-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379092 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299e04b4-ceeb-4acb-bd40-516a2eef1486-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 
21:13:45.379110 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzsv9\" (UniqueName: \"kubernetes.io/projected/adcf30c3-b406-4735-8488-f8380a7ab1e8-kube-api-access-pzsv9\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379128 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/99ddc02a-f527-40a5-a6dd-330e712e955d-metrics-tls\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379144 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqzl4\" (UniqueName: \"kubernetes.io/projected/a557b09e-dd8c-4ad4-8a24-d817c186d537-kube-api-access-rqzl4\") pod \"multus-admission-controller-857f4d67dd-qtbn4\" (UID: \"a557b09e-dd8c-4ad4-8a24-d817c186d537\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379161 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvq4x\" (UniqueName: \"kubernetes.io/projected/69b8063f-9b07-404d-91d8-e143fa0aa521-kube-api-access-rvq4x\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379185 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69af70ca-2b63-4a93-86de-d9e5f44dffb3-config\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379202 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/adcf30c3-b406-4735-8488-f8380a7ab1e8-webhook-cert\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379225 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4442f39b-048e-493e-86b8-256c5c39dca5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2kfvc\" (UID: \"4442f39b-048e-493e-86b8-256c5c39dca5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379243 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d3229a49-3424-464a-b479-460fb0a21620-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379264 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgc9r\" (UniqueName: 
\"kubernetes.io/projected/3220bc21-2d0b-42ac-8970-ce9e0d52d896-kube-api-access-mgc9r\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379278 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzgqs\" (UniqueName: \"kubernetes.io/projected/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-kube-api-access-rzgqs\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379294 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7efd4908-44a4-4b78-84cc-46c7578ee5a7-cert\") pod \"ingress-canary-5bzn7\" (UID: \"7efd4908-44a4-4b78-84cc-46c7578ee5a7\") " pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379308 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-config-volume\") pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379333 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hmmw\" (UniqueName: \"kubernetes.io/projected/299e04b4-ceeb-4acb-bd40-516a2eef1486-kube-api-access-9hmmw\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379352 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nprzx\" (UniqueName: \"kubernetes.io/projected/24d70a59-b89a-4f25-8033-38d879cfe981-kube-api-access-nprzx\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379369 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn9zb\" (UniqueName: \"kubernetes.io/projected/7efd4908-44a4-4b78-84cc-46c7578ee5a7-kube-api-access-pn9zb\") pod \"ingress-canary-5bzn7\" (UID: \"7efd4908-44a4-4b78-84cc-46c7578ee5a7\") " pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379390 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3229a49-3424-464a-b479-460fb0a21620-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379406 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-certs\") pod \"machine-config-server-phmxg\" (UID: 
\"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379422 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn44g\" (UniqueName: \"kubernetes.io/projected/99ddc02a-f527-40a5-a6dd-330e712e955d-kube-api-access-zn44g\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379437 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/adcf30c3-b406-4735-8488-f8380a7ab1e8-tmpfs\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379451 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/299e04b4-ceeb-4acb-bd40-516a2eef1486-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379468 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8306e09d-864e-4342-adc2-e56288612d89-config\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: \"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379484 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-socket-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379505 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-registration-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379526 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8306e09d-864e-4342-adc2-e56288612d89-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: \"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379548 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74959b1a-d8f4-415f-a901-4c5c162c41c9-config\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379572 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktkb4\" (UniqueName: \"kubernetes.io/projected/2d213635-9d7d-4d87-9b11-6744bbac9824-kube-api-access-ktkb4\") pod \"dns-operator-744455d44c-6sjk2\" (UID: \"2d213635-9d7d-4d87-9b11-6744bbac9824\") " pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379605 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-metrics-certs\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379622 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtwpc\" (UniqueName: \"kubernetes.io/projected/1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54-kube-api-access-dtwpc\") pod \"control-plane-machine-set-operator-78cbb6b69f-nmkts\" (UID: \"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379640 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb92f\" (UniqueName: \"kubernetes.io/projected/79816a71-299c-4111-a2e5-930f6b05710e-kube-api-access-gb92f\") pod \"migrator-59844c95c7-dsbhf\" (UID: \"79816a71-299c-4111-a2e5-930f6b05710e\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379656 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/24d70a59-b89a-4f25-8033-38d879cfe981-proxy-tls\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379671 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8306e09d-864e-4342-adc2-e56288612d89-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: \"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379688 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/adcf30c3-b406-4735-8488-f8380a7ab1e8-apiservice-cert\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379704 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/24d70a59-b89a-4f25-8033-38d879cfe981-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379721 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: 
\"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-mountpoint-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379738 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74959b1a-d8f4-415f-a901-4c5c162c41c9-serving-cert\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379755 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69af70ca-2b63-4a93-86de-d9e5f44dffb3-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379771 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-plugins-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379791 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nmkts\" (UID: \"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379810 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69b8063f-9b07-404d-91d8-e143fa0aa521-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379828 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7fzb\" (UniqueName: \"kubernetes.io/projected/5a4ca39a-9c29-4904-aa12-cca8aff1453a-kube-api-access-j7fzb\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379846 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2d213635-9d7d-4d87-9b11-6744bbac9824-metrics-tls\") pod \"dns-operator-744455d44c-6sjk2\" (UID: \"2d213635-9d7d-4d87-9b11-6744bbac9824\") " pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379869 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a557b09e-dd8c-4ad4-8a24-d817c186d537-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qtbn4\" (UID: \"a557b09e-dd8c-4ad4-8a24-d817c186d537\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379885 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cxtm\" (UniqueName: \"kubernetes.io/projected/4442f39b-048e-493e-86b8-256c5c39dca5-kube-api-access-2cxtm\") pod \"package-server-manager-789f6589d5-2kfvc\" (UID: \"4442f39b-048e-493e-86b8-256c5c39dca5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379912 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/24d70a59-b89a-4f25-8033-38d879cfe981-images\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379927 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3220bc21-2d0b-42ac-8970-ce9e0d52d896-srv-cert\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379944 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-secret-volume\") pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379961 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnvj5\" (UniqueName: \"kubernetes.io/projected/89108982-aec6-4c39-a675-508c22d2bf80-kube-api-access-vnvj5\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379982 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-default-certificate\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.379998 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89108982-aec6-4c39-a675-508c22d2bf80-service-ca-bundle\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.380016 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8tzj\" (UniqueName: \"kubernetes.io/projected/d3229a49-3424-464a-b479-460fb0a21620-kube-api-access-h8tzj\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.380289 4689 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:45.880246917 +0000 UTC m=+142.798492002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.384093 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/adcf30c3-b406-4735-8488-f8380a7ab1e8-tmpfs\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.385391 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-mountpoint-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.388458 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8306e09d-864e-4342-adc2-e56288612d89-config\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: \"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.388790 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-socket-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.389133 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-config-volume\") pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.390647 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-registration-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.391388 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69af70ca-2b63-4a93-86de-d9e5f44dffb3-config\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc 
kubenswrapper[4689]: I1013 21:13:45.392055 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-plugins-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.385427 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99ddc02a-f527-40a5-a6dd-330e712e955d-config-volume\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.393734 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74959b1a-d8f4-415f-a901-4c5c162c41c9-config\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.393820 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69b8063f-9b07-404d-91d8-e143fa0aa521-srv-cert\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.394271 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3229a49-3424-464a-b479-460fb0a21620-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.394953 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3220bc21-2d0b-42ac-8970-ce9e0d52d896-profile-collector-cert\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.395986 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nmkts\" (UID: \"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.396433 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89108982-aec6-4c39-a675-508c22d2bf80-service-ca-bundle\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.397031 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4442f39b-048e-493e-86b8-256c5c39dca5-package-server-manager-serving-cert\") pod 
\"package-server-manager-789f6589d5-2kfvc\" (UID: \"4442f39b-048e-493e-86b8-256c5c39dca5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.397555 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7efd4908-44a4-4b78-84cc-46c7578ee5a7-cert\") pod \"ingress-canary-5bzn7\" (UID: \"7efd4908-44a4-4b78-84cc-46c7578ee5a7\") " pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.399149 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/24d70a59-b89a-4f25-8033-38d879cfe981-images\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.399886 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-default-certificate\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.400091 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/24d70a59-b89a-4f25-8033-38d879cfe981-auth-proxy-config\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.400561 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tckgt"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.400640 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69b8063f-9b07-404d-91d8-e143fa0aa521-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.400911 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5a4ca39a-9c29-4904-aa12-cca8aff1453a-csi-data-dir\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.401115 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-metrics-certs\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.401540 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299e04b4-ceeb-4acb-bd40-516a2eef1486-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.401746 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/958def5c-022a-4947-b863-499dad6954dc-signing-cabundle\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.401937 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.402700 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3220bc21-2d0b-42ac-8970-ce9e0d52d896-srv-cert\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.403218 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-node-bootstrap-token\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.404665 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a557b09e-dd8c-4ad4-8a24-d817c186d537-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qtbn4\" (UID: \"a557b09e-dd8c-4ad4-8a24-d817c186d537\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.406372 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/299e04b4-ceeb-4acb-bd40-516a2eef1486-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.406677 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/24d70a59-b89a-4f25-8033-38d879cfe981-proxy-tls\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.406725 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74959b1a-d8f4-415f-a901-4c5c162c41c9-serving-cert\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.406885 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8306e09d-864e-4342-adc2-e56288612d89-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: 
\"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.407146 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/69af70ca-2b63-4a93-86de-d9e5f44dffb3-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.407164 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/958def5c-022a-4947-b863-499dad6954dc-signing-key\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.407619 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/99ddc02a-f527-40a5-a6dd-330e712e955d-metrics-tls\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.408053 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/adcf30c3-b406-4735-8488-f8380a7ab1e8-webhook-cert\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.408302 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/89108982-aec6-4c39-a675-508c22d2bf80-stats-auth\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.414554 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/adcf30c3-b406-4735-8488-f8380a7ab1e8-apiservice-cert\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.414774 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-certs\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.415222 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d3229a49-3424-464a-b479-460fb0a21620-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.415635 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-secret-volume\") 
pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.417424 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2d213635-9d7d-4d87-9b11-6744bbac9824-metrics-tls\") pod \"dns-operator-744455d44c-6sjk2\" (UID: \"2d213635-9d7d-4d87-9b11-6744bbac9824\") " pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.422320 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wg85\" (UniqueName: \"kubernetes.io/projected/f762bf8c-9be0-4bbe-a2ec-66f701aa99b0-kube-api-access-6wg85\") pod \"apiserver-76f77b778f-j5nvb\" (UID: \"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0\") " pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.423676 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cglhk\" (UniqueName: \"kubernetes.io/projected/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-kube-api-access-cglhk\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.427391 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7gghw"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.441044 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlkkz\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-kube-api-access-rlkkz\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.458791 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51d19dc5-9ac9-4b48-93ae-5d29535e1df6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-m4j9c\" (UID: \"51d19dc5-9ac9-4b48-93ae-5d29535e1df6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.465700 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p7j66"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.477122 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l2xt\" (UniqueName: \"kubernetes.io/projected/ae1eb6bd-4b07-4540-b12c-c5fa318f5b64-kube-api-access-8l2xt\") pod \"console-operator-58897d9998-nr7vc\" (UID: \"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64\") " pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.484928 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 
21:13:45.485620 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:45.985604007 +0000 UTC m=+142.903849092 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.496115 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.511743 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.515700 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqcqv\" (UniqueName: \"kubernetes.io/projected/42104006-5aa2-4e76-9a90-5402fb280c09-kube-api-access-qqcqv\") pod \"etcd-operator-b45778765-9bdb5\" (UID: \"42104006-5aa2-4e76-9a90-5402fb280c09\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.515939 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.529563 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgp8l\" (UniqueName: \"kubernetes.io/projected/7fdfc234-e5da-413a-89e5-226f99fe29af-kube-api-access-wgp8l\") pod \"openshift-apiserver-operator-796bbdcf4f-znbxz\" (UID: \"7fdfc234-e5da-413a-89e5-226f99fe29af\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.562359 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.564234 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fdwnr"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.571441 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8tzj\" (UniqueName: \"kubernetes.io/projected/d3229a49-3424-464a-b479-460fb0a21620-kube-api-access-h8tzj\") pod \"marketplace-operator-79b997595-mql5t\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") " pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.586329 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvq4x\" (UniqueName: \"kubernetes.io/projected/69b8063f-9b07-404d-91d8-e143fa0aa521-kube-api-access-rvq4x\") pod \"olm-operator-6b444d44fb-9dxhv\" (UID: \"69b8063f-9b07-404d-91d8-e143fa0aa521\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 
21:13:45.588494 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.589198 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.089181498 +0000 UTC m=+143.007426583 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.618714 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn9zb\" (UniqueName: \"kubernetes.io/projected/7efd4908-44a4-4b78-84cc-46c7578ee5a7-kube-api-access-pn9zb\") pod \"ingress-canary-5bzn7\" (UID: \"7efd4908-44a4-4b78-84cc-46c7578ee5a7\") " pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.619134 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.619718 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hmmw\" (UniqueName: \"kubernetes.io/projected/299e04b4-ceeb-4acb-bd40-516a2eef1486-kube-api-access-9hmmw\") pod \"kube-storage-version-migrator-operator-b67b599dd-2dd45\" (UID: \"299e04b4-ceeb-4acb-bd40-516a2eef1486\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.644697 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.646925 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nprzx\" (UniqueName: \"kubernetes.io/projected/24d70a59-b89a-4f25-8033-38d879cfe981-kube-api-access-nprzx\") pod \"machine-config-operator-74547568cd-p2tmp\" (UID: \"24d70a59-b89a-4f25-8033-38d879cfe981\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: W1013 21:13:45.648705 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod187c4133_36b2_4d56_be78_75d555af16a4.slice/crio-91bd39c90adf5dd7e6de78e2ce14c749a127299bf2819b513d32ed3229889056 WatchSource:0}: Error finding container 91bd39c90adf5dd7e6de78e2ce14c749a127299bf2819b513d32ed3229889056: Status 404 returned error can't find the container with id 91bd39c90adf5dd7e6de78e2ce14c749a127299bf2819b513d32ed3229889056 Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.652348 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.668658 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8306e09d-864e-4342-adc2-e56288612d89-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8f84m\" (UID: \"8306e09d-864e-4342-adc2-e56288612d89\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.675781 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktkb4\" (UniqueName: \"kubernetes.io/projected/2d213635-9d7d-4d87-9b11-6744bbac9824-kube-api-access-ktkb4\") pod \"dns-operator-744455d44c-6sjk2\" (UID: \"2d213635-9d7d-4d87-9b11-6744bbac9824\") " pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.677334 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.697846 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.698353 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.198338601 +0000 UTC m=+143.116583686 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.698064 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.701450 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cxtm\" (UniqueName: \"kubernetes.io/projected/4442f39b-048e-493e-86b8-256c5c39dca5-kube-api-access-2cxtm\") pod \"package-server-manager-789f6589d5-2kfvc\" (UID: \"4442f39b-048e-493e-86b8-256c5c39dca5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.724058 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn44g\" (UniqueName: \"kubernetes.io/projected/99ddc02a-f527-40a5-a6dd-330e712e955d-kube-api-access-zn44g\") pod \"dns-default-fw54p\" (UID: \"99ddc02a-f527-40a5-a6dd-330e712e955d\") " pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.726752 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5bzn7" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.741208 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnvj5\" (UniqueName: \"kubernetes.io/projected/89108982-aec6-4c39-a675-508c22d2bf80-kube-api-access-vnvj5\") pod \"router-default-5444994796-nlljk\" (UID: \"89108982-aec6-4c39-a675-508c22d2bf80\") " pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.742159 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.749829 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" event={"ID":"a316203a-ecb5-465b-a364-c5b517f31dee","Type":"ContainerStarted","Data":"f36af53812f61ed47fc20c9d53a4db096d81a766023a4e7b2e816e165afbccbd"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.749914 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" event={"ID":"a316203a-ecb5-465b-a364-c5b517f31dee","Type":"ContainerStarted","Data":"9abe0de1126ca0d78e01baacb70cbd0d96da0b839b673f862c846252abe17989"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.756799 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z5vx4" event={"ID":"0d4c9845-75c1-43df-b20c-2e90d4830d84","Type":"ContainerStarted","Data":"cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.756821 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z5vx4" event={"ID":"0d4c9845-75c1-43df-b20c-2e90d4830d84","Type":"ContainerStarted","Data":"3378aad154596b1a7016394dacd13113ce904ff52e6d04996f955121cedbcf48"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.758381 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" event={"ID":"202442b7-241e-44ee-b24f-0eac63864890","Type":"ContainerStarted","Data":"20a2b2411e8bd189698e75254f0cd56c6c1ab168f66dddcff2803bbe3885136d"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.769462 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgc9r\" (UniqueName: \"kubernetes.io/projected/3220bc21-2d0b-42ac-8970-ce9e0d52d896-kube-api-access-mgc9r\") pod \"catalog-operator-68c6474976-s6x6p\" (UID: \"3220bc21-2d0b-42ac-8970-ce9e0d52d896\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.774084 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" event={"ID":"58d51d91-5495-47c7-a6ed-9a8964688b49","Type":"ContainerStarted","Data":"b85ae37045fed8036bd1a66058f87fe5cfcb26fc456e6aa8d7f370e2e31707ee"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.776731 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xsgbv" event={"ID":"1f429fdd-330f-4526-9fcd-fb6293286256","Type":"ContainerStarted","Data":"3f00b1188e3a7986d6514f422655a1695fd9ba396b31bd91129d4463c1507bb5"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.776757 4689 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-xsgbv" event={"ID":"1f429fdd-330f-4526-9fcd-fb6293286256","Type":"ContainerStarted","Data":"77e73a3beff08c006441311c4aa8ed51ab44907c480ce40b4451748da79875b5"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.778360 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" event={"ID":"c5168fba-bd94-4b2b-a4d4-886f93ea2988","Type":"ContainerStarted","Data":"66d8b33568a42eafcab4b7077a29b78537fa9afe3b94be1c6b66f1b994b9f576"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.778383 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" event={"ID":"c5168fba-bd94-4b2b-a4d4-886f93ea2988","Type":"ContainerStarted","Data":"e643ce19c7ae3dcabe04e577718ec8ab47c2ef5605c6e98cfcf3225085150e89"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.790384 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzgqs\" (UniqueName: \"kubernetes.io/projected/e67221f8-ac2e-425b-8cb4-1bd164f4c2a7-kube-api-access-rzgqs\") pod \"machine-config-server-phmxg\" (UID: \"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7\") " pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.791209 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" event={"ID":"ba56493b-6d33-4bee-bbb2-a431a7622cdb","Type":"ContainerStarted","Data":"ec51d0a53c5353604ebbcd20539d487e5316de96210badbc9286f6b3f26324ca"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.793617 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" event={"ID":"72d762df-fe7c-4966-a3fe-62904ca5b93a","Type":"ContainerStarted","Data":"ef919ba6ce77eb959e1a91c4453a5d5def16cf7c8d58c5fb37dff3d2087822ec"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.793654 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" event={"ID":"72d762df-fe7c-4966-a3fe-62904ca5b93a","Type":"ContainerStarted","Data":"eae55040e17704b82b52d259be8d1885f186969d391a94a09c275e4fd21c843f"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.794666 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" event={"ID":"bd0e15ea-205d-4e73-822b-b16cc8f33ab2","Type":"ContainerStarted","Data":"d394fd0ad18b78c2152aac5b99666d0e5480cb27edde98c1d8c9f6c829e35c64"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.804097 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.804662 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.304640427 +0000 UTC m=+143.222885512 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.808890 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.817259 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtwpc\" (UniqueName: \"kubernetes.io/projected/1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54-kube-api-access-dtwpc\") pod \"control-plane-machine-set-operator-78cbb6b69f-nmkts\" (UID: \"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.820193 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" event={"ID":"cfbc99b4-60d2-42c6-8c34-13c7f60fd122","Type":"ContainerStarted","Data":"f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.820251 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" event={"ID":"cfbc99b4-60d2-42c6-8c34-13c7f60fd122","Type":"ContainerStarted","Data":"d5343c19dacfa88620b6eaac0ed6d5094f39583558aefacc6d0731c329d4467d"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.821101 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.823097 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" event={"ID":"9db38791-f017-4d4f-b9e9-08f3ccd38704","Type":"ContainerStarted","Data":"6b3a33207043212cfdf02f1cd789d4cd549c5c906369b37461ea82e415f38470"} Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.826485 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.829821 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.837780 4689 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-82j7d container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.837843 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" podUID="cfbc99b4-60d2-42c6-8c34-13c7f60fd122" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.838579 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.846128 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7fzb\" (UniqueName: \"kubernetes.io/projected/5a4ca39a-9c29-4904-aa12-cca8aff1453a-kube-api-access-j7fzb\") pod \"csi-hostpathplugin-lbpft\" (UID: \"5a4ca39a-9c29-4904-aa12-cca8aff1453a\") " pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.857793 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k7zw\" (UniqueName: \"kubernetes.io/projected/958def5c-022a-4947-b863-499dad6954dc-kube-api-access-8k7zw\") pod \"service-ca-9c57cc56f-z7dn5\" (UID: \"958def5c-022a-4947-b863-499dad6954dc\") " pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.864964 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.872848 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.878990 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhwjl\" (UniqueName: \"kubernetes.io/projected/74959b1a-d8f4-415f-a901-4c5c162c41c9-kube-api-access-fhwjl\") pod \"service-ca-operator-777779d784-2hmcd\" (UID: \"74959b1a-d8f4-415f-a901-4c5c162c41c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.881661 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.892262 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.897129 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb92f\" (UniqueName: \"kubernetes.io/projected/79816a71-299c-4111-a2e5-930f6b05710e-kube-api-access-gb92f\") pod \"migrator-59844c95c7-dsbhf\" (UID: \"79816a71-299c-4111-a2e5-930f6b05710e\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.906217 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.906822 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:45 crc kubenswrapper[4689]: E1013 21:13:45.908774 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.408761652 +0000 UTC m=+143.327006727 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.914745 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-j5nvb"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.916607 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/69af70ca-2b63-4a93-86de-d9e5f44dffb3-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vksbd\" (UID: \"69af70ca-2b63-4a93-86de-d9e5f44dffb3\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.934399 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.934399 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.938913 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pxhk\" (UniqueName: \"kubernetes.io/projected/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-kube-api-access-6pxhk\") pod \"collect-profiles-29339820-8wtvj\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.951458 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.953186 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqzl4\" (UniqueName: \"kubernetes.io/projected/a557b09e-dd8c-4ad4-8a24-d817c186d537-kube-api-access-rqzl4\") pod \"multus-admission-controller-857f4d67dd-qtbn4\" (UID: \"a557b09e-dd8c-4ad4-8a24-d817c186d537\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.959113 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.960901 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzsv9\" (UniqueName: \"kubernetes.io/projected/adcf30c3-b406-4735-8488-f8380a7ab1e8-kube-api-access-pzsv9\") pod \"packageserver-d55dfcdfc-q26mq\" (UID: \"adcf30c3-b406-4735-8488-f8380a7ab1e8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.971193 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.989462 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-phmxg" Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.991281 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl"] Oct 13 21:13:45 crc kubenswrapper[4689]: I1013 21:13:45.998661 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-fw54p" Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.008364 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.009742 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.509711522 +0000 UTC m=+143.427956747 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.025569 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" Oct 13 21:13:46 crc kubenswrapper[4689]: W1013 21:13:46.051065 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e86a349_8bfc_4667_8213_a034dc8c1a7b.slice/crio-e904e9eb9139cb07382b9f7d79cb97831b8efc4d52c4e887214cde2b94f6b5d5 WatchSource:0}: Error finding container e904e9eb9139cb07382b9f7d79cb97831b8efc4d52c4e887214cde2b94f6b5d5: Status 404 returned error can't find the container with id e904e9eb9139cb07382b9f7d79cb97831b8efc4d52c4e887214cde2b94f6b5d5 Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.100669 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nr7vc"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.111979 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.112631 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.612614585 +0000 UTC m=+143.530859670 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.144914 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.154316 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.213066 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.213714 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.713689038 +0000 UTC m=+143.631934123 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.215940 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:46 crc kubenswrapper[4689]: W1013 21:13:46.261680 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae1eb6bd_4b07_4540_b12c_c5fa318f5b64.slice/crio-e098baed02a9c0a836a1b7295d5bf28dfadfa0ea857b58e313c914fb906fef9a WatchSource:0}: Error finding container e098baed02a9c0a836a1b7295d5bf28dfadfa0ea857b58e313c914fb906fef9a: Status 404 returned error can't find the container with id e098baed02a9c0a836a1b7295d5bf28dfadfa0ea857b58e313c914fb906fef9a Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.316275 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.317179 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.817164687 +0000 UTC m=+143.735409772 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.379188 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.393570 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6sjk2"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.417489 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.418158 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:46.918124397 +0000 UTC m=+143.836369482 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.440899 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.446557 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5bzn7"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.461439 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.519152 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.519564 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.019544229 +0000 UTC m=+143.937789314 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: W1013 21:13:46.616642 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode67221f8_ac2e_425b_8cb4_1bd164f4c2a7.slice/crio-ce5a3bcd078f04de488af4a93af3374f61608d8147f97ad8f074bc1237c63073 WatchSource:0}: Error finding container ce5a3bcd078f04de488af4a93af3374f61608d8147f97ad8f074bc1237c63073: Status 404 returned error can't find the container with id ce5a3bcd078f04de488af4a93af3374f61608d8147f97ad8f074bc1237c63073 Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.620749 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.621265 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.121226149 +0000 UTC m=+144.039471234 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.693933 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-gjkn6" podStartSLOduration=122.693894118 podStartE2EDuration="2m2.693894118s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:46.679128135 +0000 UTC m=+143.597373220" watchObservedRunningTime="2025-10-13 21:13:46.693894118 +0000 UTC m=+143.612139213" Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.715431 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-ptbs8" podStartSLOduration=121.715403183 podStartE2EDuration="2m1.715403183s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:46.713954814 +0000 UTC m=+143.632199899" watchObservedRunningTime="2025-10-13 21:13:46.715403183 +0000 UTC m=+143.633648268" Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.732178 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.733298 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.233280841 +0000 UTC m=+144.151525926 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: W1013 21:13:46.744384 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89108982_aec6_4c39_a675_508c22d2bf80.slice/crio-95c736f675d7c4062332435472d3b011d2e6dae7a70c199b4eb372cd5afd240d WatchSource:0}: Error finding container 95c736f675d7c4062332435472d3b011d2e6dae7a70c199b4eb372cd5afd240d: Status 404 returned error can't find the container with id 95c736f675d7c4062332435472d3b011d2e6dae7a70c199b4eb372cd5afd240d Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.762025 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9bdb5"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.847641 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.848093 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.348072857 +0000 UTC m=+144.266317942 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.900967 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.904403 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.907044 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-z7dn5"] Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.944342 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" event={"ID":"69b8063f-9b07-404d-91d8-e143fa0aa521","Type":"ContainerStarted","Data":"da6adf9bba65270ac3b37118e99cdef14e230822d7d2e0e67414a696a224694e"} Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.949725 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:46 crc kubenswrapper[4689]: E1013 21:13:46.950311 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.450297122 +0000 UTC m=+144.368542207 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.956813 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" event={"ID":"110831e6-2732-4fe8-8f36-3505ff66495f","Type":"ContainerStarted","Data":"813fd083cc05d0332d6f01cdcb1db1bf41a6f1df099ae00a2ae70b43fbc33c8d"} Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.963389 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" event={"ID":"ba56493b-6d33-4bee-bbb2-a431a7622cdb","Type":"ContainerStarted","Data":"80d525f5679131f6575c5f6cf0d11bf0dfde6a29b35eb208311c4c6bd1799d5f"} Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.975188 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" event={"ID":"5e86a349-8bfc-4667-8213-a034dc8c1a7b","Type":"ContainerStarted","Data":"e904e9eb9139cb07382b9f7d79cb97831b8efc4d52c4e887214cde2b94f6b5d5"} Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.978915 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" event={"ID":"202442b7-241e-44ee-b24f-0eac63864890","Type":"ContainerStarted","Data":"7f501064fa0bd235390f6bd45fcb109b646b0be2715ad146504157ab71bcb770"} Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.982687 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" event={"ID":"bd0e15ea-205d-4e73-822b-b16cc8f33ab2","Type":"ContainerStarted","Data":"3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717"} Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.982873 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.993648 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" event={"ID":"51d19dc5-9ac9-4b48-93ae-5d29535e1df6","Type":"ContainerStarted","Data":"9225019461a6278e1f1805a53afb470ccb77981b5a718761835cc7a393f3e36d"} Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.997791 4689 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-p7j66 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 13 21:13:46 crc kubenswrapper[4689]: I1013 21:13:46.997844 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" podUID="bd0e15ea-205d-4e73-822b-b16cc8f33ab2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.003412 4689 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.010358 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-phmxg" event={"ID":"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7","Type":"ContainerStarted","Data":"ce5a3bcd078f04de488af4a93af3374f61608d8147f97ad8f074bc1237c63073"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.018941 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" event={"ID":"187c4133-36b2-4d56-be78-75d555af16a4","Type":"ContainerStarted","Data":"64eaf454701dc43a149396129497ae2851be4a0f468cf61b947a7031a724e36a"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.019706 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" event={"ID":"187c4133-36b2-4d56-be78-75d555af16a4","Type":"ContainerStarted","Data":"91bd39c90adf5dd7e6de78e2ce14c749a127299bf2819b513d32ed3229889056"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.040025 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mql5t"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.045836 4689 generic.go:334] "Generic (PLEG): container finished" podID="58d51d91-5495-47c7-a6ed-9a8964688b49" containerID="7d40b4f7f99c28967e61407e07ef0ad62d98079e9ce97264f4d2933217dd04bb" exitCode=0
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.045956 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" event={"ID":"58d51d91-5495-47c7-a6ed-9a8964688b49","Type":"ContainerDied","Data":"7d40b4f7f99c28967e61407e07ef0ad62d98079e9ce97264f4d2933217dd04bb"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.048834 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-nr7vc" event={"ID":"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64","Type":"ContainerStarted","Data":"e098baed02a9c0a836a1b7295d5bf28dfadfa0ea857b58e313c914fb906fef9a"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.051037 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.052706 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.551370705 +0000 UTC m=+144.469615790 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.058894 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.077446 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" event={"ID":"72d762df-fe7c-4966-a3fe-62904ca5b93a","Type":"ContainerStarted","Data":"5c180f61062ac4bb06a928c8961c7bd419bfa522a54667b97a4dfc477b26376c"}
Oct 13 21:13:47 crc kubenswrapper[4689]: W1013 21:13:47.082311 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42104006_5aa2_4e76_9a90_5402fb280c09.slice/crio-b934702d5dd4ad6c3d07ff66296a843967400cb457c84e371bd6b917c5d42005 WatchSource:0}: Error finding container b934702d5dd4ad6c3d07ff66296a843967400cb457c84e371bd6b917c5d42005: Status 404 returned error can't find the container with id b934702d5dd4ad6c3d07ff66296a843967400cb457c84e371bd6b917c5d42005
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.084030 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" event={"ID":"299e04b4-ceeb-4acb-bd40-516a2eef1486","Type":"ContainerStarted","Data":"dcb500f9074b2f9bd3e4e9bfeab9c5bac25af5e5205f4f5549bdfb93f82c307d"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.086950 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" event={"ID":"2d213635-9d7d-4d87-9b11-6744bbac9824","Type":"ContainerStarted","Data":"420b412cc083cc205794c4f949464c27dac7b3fb787ba1e40bf1dd69d3536e2d"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.096489 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" event={"ID":"9db38791-f017-4d4f-b9e9-08f3ccd38704","Type":"ContainerStarted","Data":"0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.097175 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.098597 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" event={"ID":"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0","Type":"ContainerStarted","Data":"51e759a4a0b818ab576728b1739b294ee931581a03fbf9202bb7aef0ba1b1e6f"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.100413 4689 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7gghw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused" start-of-body=
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.100464 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" podUID="9db38791-f017-4d4f-b9e9-08f3ccd38704" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.7:6443/healthz\": dial tcp 10.217.0.7:6443: connect: connection refused"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.101717 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5bzn7" event={"ID":"7efd4908-44a4-4b78-84cc-46c7578ee5a7","Type":"ContainerStarted","Data":"3ef3e87b16b7f449b3c425df5f8b9ddb65d2ccdb6d419c53084b3dc6eccdfdf7"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.104507 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" event={"ID":"89f4bdbb-1a86-42b4-8067-7be1209a54cc","Type":"ContainerStarted","Data":"2a95a62ed10fe33c74e7a5bd3617dbc97642678240dc74c651e32d05f9545719"}
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.107085 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-xsgbv"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.110745 4689 patch_prober.go:28] interesting pod/downloads-7954f5f757-xsgbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.110825 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xsgbv" podUID="1f429fdd-330f-4526-9fcd-fb6293286256" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.152899 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.153745 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.653718472 +0000 UTC m=+144.571963557 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.196071 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-z5vx4" podStartSLOduration=122.196046315 podStartE2EDuration="2m2.196046315s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:47.192645033 +0000 UTC m=+144.110890138" watchObservedRunningTime="2025-10-13 21:13:47.196046315 +0000 UTC m=+144.114291400"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.257474 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.258671 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.75865058 +0000 UTC m=+144.676895665 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.321834 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-lbpft"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.343952 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.359947 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.360409 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.860378732 +0000 UTC m=+144.778623817 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.462401 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.462977 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:47.962956865 +0000 UTC m=+144.881201950 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.490682 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" podStartSLOduration=122.49065949 podStartE2EDuration="2m2.49065949s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:47.478314054 +0000 UTC m=+144.396559139" watchObservedRunningTime="2025-10-13 21:13:47.49065949 +0000 UTC m=+144.408904575"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.566307 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.569170 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.069150728 +0000 UTC m=+144.987395813 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.667512 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.668877 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.168840813 +0000 UTC m=+145.087085898 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.669754 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.670161 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.170154039 +0000 UTC m=+145.088399124 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.727948 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4zcht" podStartSLOduration=122.727927172 podStartE2EDuration="2m2.727927172s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:47.725743923 +0000 UTC m=+144.643989008" watchObservedRunningTime="2025-10-13 21:13:47.727927172 +0000 UTC m=+144.646172257"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.770907 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.771258 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.271239042 +0000 UTC m=+145.189484127 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.874284 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.875819 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.37579729 +0000 UTC m=+145.294042375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.903782 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" podStartSLOduration=122.903756892 podStartE2EDuration="2m2.903756892s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:47.87947845 +0000 UTC m=+144.797723535" watchObservedRunningTime="2025-10-13 21:13:47.903756892 +0000 UTC m=+144.822001977"
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.925834 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qtbn4"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.925912 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.925927 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.925952 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.943902 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-fw54p"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.943974 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.946154 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd"]
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.990809 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.991133 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.491111001 +0000 UTC m=+145.409356086 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:47 crc kubenswrapper[4689]: I1013 21:13:47.991477 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:47 crc kubenswrapper[4689]: E1013 21:13:47.991800 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.49179365 +0000 UTC m=+145.410038735 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.014733 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" podStartSLOduration=124.014712834 podStartE2EDuration="2m4.014712834s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.01123546 +0000 UTC m=+144.929480535" watchObservedRunningTime="2025-10-13 21:13:48.014712834 +0000 UTC m=+144.932957919"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.069211 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts"]
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.096311 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.097658 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.597616322 +0000 UTC m=+145.515861407 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.172832 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq"]
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.191972 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-xsgbv" podStartSLOduration=123.191948202 podStartE2EDuration="2m3.191948202s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.185030034 +0000 UTC m=+145.103275119" watchObservedRunningTime="2025-10-13 21:13:48.191948202 +0000 UTC m=+145.110193287"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.198556 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.199080 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.699064336 +0000 UTC m=+145.617309421 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.199144 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" event={"ID":"5e86a349-8bfc-4667-8213-a034dc8c1a7b","Type":"ContainerStarted","Data":"eba61c31a6b59b24b657de5bc419278275e8559c1f0daa518a8448d0604ced4c"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.223491 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" event={"ID":"5a4ca39a-9c29-4904-aa12-cca8aff1453a","Type":"ContainerStarted","Data":"42a8608e4d859b9fc491869503f503c61adf3aa94410cba0a251de33d474831b"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.224694 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-njhdl" podStartSLOduration=123.224673763 podStartE2EDuration="2m3.224673763s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.22383642 +0000 UTC m=+145.142081505" watchObservedRunningTime="2025-10-13 21:13:48.224673763 +0000 UTC m=+145.142918848"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.232114 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-phmxg" event={"ID":"e67221f8-ac2e-425b-8cb4-1bd164f4c2a7","Type":"ContainerStarted","Data":"3decbad40a4e09ac652086b82b7493ff0d3573d760c7a631ec8922cad2c2e009"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.250651 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" event={"ID":"a557b09e-dd8c-4ad4-8a24-d817c186d537","Type":"ContainerStarted","Data":"dfe42c8e77acb9a3cfa648beee08b165243b28597a9a953f9e22ceed28364a96"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.256272 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-phmxg" podStartSLOduration=6.256250474 podStartE2EDuration="6.256250474s" podCreationTimestamp="2025-10-13 21:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.255718429 +0000 UTC m=+145.173963504" watchObservedRunningTime="2025-10-13 21:13:48.256250474 +0000 UTC m=+145.174495559"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.266306 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" event={"ID":"58d51d91-5495-47c7-a6ed-9a8964688b49","Type":"ContainerStarted","Data":"003fe542940947479c2366f6904cafbcc214c78ed42cd07007dd100d790af073"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.266398 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.271762 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" event={"ID":"187c4133-36b2-4d56-be78-75d555af16a4","Type":"ContainerStarted","Data":"c08db8fac1802320e3e00be747dee77f3d572bd981503053f9f87420ca3dafe4"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.273661 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" event={"ID":"79816a71-299c-4111-a2e5-930f6b05710e","Type":"ContainerStarted","Data":"6e59f8386ea92510d93ac0bd6f93a35cf57367560134a7f0afede27056b8459d"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.274459 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" event={"ID":"69af70ca-2b63-4a93-86de-d9e5f44dffb3","Type":"ContainerStarted","Data":"3942da5d10fa592f4852be65515d58b97ef755c3833e7652e43720c0b4f66ec7"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.275522 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" event={"ID":"42104006-5aa2-4e76-9a90-5402fb280c09","Type":"ContainerStarted","Data":"b934702d5dd4ad6c3d07ff66296a843967400cb457c84e371bd6b917c5d42005"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.278110 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" event={"ID":"74959b1a-d8f4-415f-a901-4c5c162c41c9","Type":"ContainerStarted","Data":"dc010a7bf5f2f447b2366b2fd66c156928b3fe2ccfa3ad63babec3a92b846469"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.286112 4689 generic.go:334] "Generic (PLEG): container finished" podID="f762bf8c-9be0-4bbe-a2ec-66f701aa99b0" containerID="e44fb451f46d64faa72dfdfb076e7ab6cd76e64408ac0c96b195001640b18cef" exitCode=0
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.286240 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" event={"ID":"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0","Type":"ContainerDied","Data":"e44fb451f46d64faa72dfdfb076e7ab6cd76e64408ac0c96b195001640b18cef"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.304357 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.306209 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.806185243 +0000 UTC m=+145.724430328 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.306717 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.308352 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.808339842 +0000 UTC m=+145.726584927 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.313872 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" podStartSLOduration=123.313833092 podStartE2EDuration="2m3.313833092s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.3023598 +0000 UTC m=+145.220604885" watchObservedRunningTime="2025-10-13 21:13:48.313833092 +0000 UTC m=+145.232078177"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.331107 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5wbmg" podStartSLOduration=123.331085472 podStartE2EDuration="2m3.331085472s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.32915769 +0000 UTC m=+145.247402775" watchObservedRunningTime="2025-10-13 21:13:48.331085472 +0000 UTC m=+145.249330557"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.383784 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" event={"ID":"7fdfc234-e5da-413a-89e5-226f99fe29af","Type":"ContainerStarted","Data":"8e865e51d45ec0df2578d4fe6856e3d53e892b3892bcc7f6c5873cbaa171de09"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.396407 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" event={"ID":"3220bc21-2d0b-42ac-8970-ce9e0d52d896","Type":"ContainerStarted","Data":"6fdc82b2b7e79ebdde632e57f32c2b8be702df551df4d8dcd2e181a675af54e8"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.408708 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" event={"ID":"8306e09d-864e-4342-adc2-e56288612d89","Type":"ContainerStarted","Data":"e67c25072dc01fbc3615f0d36e15feb4f1f4bc4723b73aae6abd32cdfbf402fc"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.412437 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.412853 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:48.912815508 +0000 UTC m=+145.831060593 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.449681 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nlljk" event={"ID":"89108982-aec6-4c39-a675-508c22d2bf80","Type":"ContainerStarted","Data":"95c736f675d7c4062332435472d3b011d2e6dae7a70c199b4eb372cd5afd240d"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.490918 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-nr7vc" event={"ID":"ae1eb6bd-4b07-4540-b12c-c5fa318f5b64","Type":"ContainerStarted","Data":"166c427d940dc6fc281baaa36f08a29a21ada8d770ddb83d3017bb0cd8cf61e8"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.491996 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-nr7vc"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.510461 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-nlljk" podStartSLOduration=123.510432877 podStartE2EDuration="2m3.510432877s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.50615243 +0000 UTC m=+145.424397515" watchObservedRunningTime="2025-10-13 21:13:48.510432877 +0000 UTC m=+145.428677962"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.514612 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.516750 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.016729859 +0000 UTC m=+145.934974944 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.517257 4689 patch_prober.go:28] interesting pod/console-operator-58897d9998-nr7vc container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body=
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.517339 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-nr7vc" podUID="ae1eb6bd-4b07-4540-b12c-c5fa318f5b64" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.541087 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-nr7vc" podStartSLOduration=123.541060041 podStartE2EDuration="2m3.541060041s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.53954902 +0000 UTC m=+145.457794115" watchObservedRunningTime="2025-10-13 21:13:48.541060041 +0000 UTC m=+145.459305126"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.551979 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" event={"ID":"202442b7-241e-44ee-b24f-0eac63864890","Type":"ContainerStarted","Data":"150a8c60c98ae7d14b37091f369e83dc8d94ceb3d2f09bc7789e5a3de496f105"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.560888 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5bzn7" event={"ID":"7efd4908-44a4-4b78-84cc-46c7578ee5a7","Type":"ContainerStarted","Data":"0c6d0855bc70814bd30414ba9a7fe804d6f039a22091a04bfdef289c7f6f8246"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.573308 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" event={"ID":"24d70a59-b89a-4f25-8033-38d879cfe981","Type":"ContainerStarted","Data":"a9e4acfd4d2d6eefd2a6190abc421c5841f6e82b06f162be450f83073b193c67"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.589191 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdwnr" podStartSLOduration=123.589166311 podStartE2EDuration="2m3.589166311s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.580611418 +0000 UTC m=+145.498856493" watchObservedRunningTime="2025-10-13 21:13:48.589166311 +0000 UTC m=+145.507411396"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.591278 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" event={"ID":"4442f39b-048e-493e-86b8-256c5c39dca5","Type":"ContainerStarted","Data":"9e2a48b76bd60e2afc09f0f6a0658aa4a0027e8aaf7d30b4856c90f4928a96d5"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.603212 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" event={"ID":"d3229a49-3424-464a-b479-460fb0a21620","Type":"ContainerStarted","Data":"b827631fad039b9ceca3764aa217040de6494aef1ddd76364ad99aa2fc73521a"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.612967 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-fw54p" event={"ID":"99ddc02a-f527-40a5-a6dd-330e712e955d","Type":"ContainerStarted","Data":"0e49c0466d2a30e2b7b4272104528236c543a2e9ad9654b3fa177d76291b1c32"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.616651 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-5bzn7" podStartSLOduration=6.6166373400000005 podStartE2EDuration="6.61663734s" podCreationTimestamp="2025-10-13 21:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.616464745 +0000 UTC m=+145.534709830" watchObservedRunningTime="2025-10-13 21:13:48.61663734 +0000 UTC m=+145.534882425"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.628820 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.629519 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.129472289 +0000 UTC m=+146.047717374 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.630098 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.633813 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.133801877 +0000 UTC m=+146.052046952 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.649884 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" event={"ID":"958def5c-022a-4947-b863-499dad6954dc","Type":"ContainerStarted","Data":"04428a8c9ce08adb8d20884d96bc636cdc33287f03a262c6bcdfb5488a594f0e"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.649948 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" event={"ID":"958def5c-022a-4947-b863-499dad6954dc","Type":"ContainerStarted","Data":"b70e836b4a9a7177132f411dfc1034be2d2574b79007f8224a0834bc9b23029c"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.677045 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-z7dn5" podStartSLOduration=123.677021694 podStartE2EDuration="2m3.677021694s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:48.67245251 +0000 UTC m=+145.590697615" watchObservedRunningTime="2025-10-13 21:13:48.677021694 +0000 UTC m=+145.595266779"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.685551 4689 generic.go:334] "Generic (PLEG): container finished" podID="110831e6-2732-4fe8-8f36-3505ff66495f" containerID="8e9bad8c4bfb3065936383a9692b46ff0d83800453df61f273bdd2f895538ded" exitCode=0
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.685620 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" event={"ID":"110831e6-2732-4fe8-8f36-3505ff66495f","Type":"ContainerDied","Data":"8e9bad8c4bfb3065936383a9692b46ff0d83800453df61f273bdd2f895538ded"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.691136 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" event={"ID":"62c53fad-57ac-4c62-86aa-f73e4e35b1f4","Type":"ContainerStarted","Data":"9405df9ec5b92c8a61815aac6017fe3545f89cf18bc51e8370c6c1c6aaa81610"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.700870 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" event={"ID":"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54","Type":"ContainerStarted","Data":"73fe13731d70a7d557ecf96cb392564cc9a096554697b197462833fb820e2ff2"}
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.702803 4689 patch_prober.go:28] interesting pod/downloads-7954f5f757-xsgbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.702866 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xsgbv" podUID="1f429fdd-330f-4526-9fcd-fb6293286256" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.731037 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.732252 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66"
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.732932 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.232888716 +0000 UTC m=+146.151133861 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.805397 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.833229 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.843730 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-nlljk"
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.845354 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.345330879 +0000 UTC m=+146.263575964 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.846820 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.846906 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Oct 13 21:13:48 crc kubenswrapper[4689]: I1013 21:13:48.953791 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:48 crc kubenswrapper[4689]: E1013 21:13:48.954153 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.454138313 +0000 UTC m=+146.372383388 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.059505 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.059937 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.559922364 +0000 UTC m=+146.478167449 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.162344 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.163289 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.663244128 +0000 UTC m=+146.581489213 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.163459 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.163898 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.663891616 +0000 UTC m=+146.582136701 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.266988 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.267436 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.767406226 +0000 UTC m=+146.685651311 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.267721 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.268218 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.768203167 +0000 UTC m=+146.686448242 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.369329 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.369862 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.869841096 +0000 UTC m=+146.788086181 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.470889 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.471790 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:49.971763272 +0000 UTC m=+146.890008357 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.572282 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.572610 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.072567547 +0000 UTC m=+146.990812632 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.572739 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.573289 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.073270636 +0000 UTC m=+146.991515721 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.673553 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.673786 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.173756753 +0000 UTC m=+147.092001838 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.674306 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.674682 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.174667489 +0000 UTC m=+147.092912574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.775955 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.776111 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.27608424 +0000 UTC m=+147.194329325 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.776863 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.777361 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.277341895 +0000 UTC m=+147.195587000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.786181 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" event={"ID":"24d70a59-b89a-4f25-8033-38d879cfe981","Type":"ContainerStarted","Data":"a745c59035934bbc579dab67385e8eb997e5adaca1889ebd256897ba17bb0276"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.786237 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" event={"ID":"24d70a59-b89a-4f25-8033-38d879cfe981","Type":"ContainerStarted","Data":"49d3fdf0fa6af5c472ffd660f91eac6937cc0c8c59c5593c4020cda7a17c5c8f"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.814439 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" event={"ID":"d3229a49-3424-464a-b479-460fb0a21620","Type":"ContainerStarted","Data":"a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.815710 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.826864 4689 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mql5t container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.826943 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" podUID="d3229a49-3424-464a-b479-460fb0a21620" 
containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.827549 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-p2tmp" podStartSLOduration=124.827527942 podStartE2EDuration="2m4.827527942s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:49.826938845 +0000 UTC m=+146.745183930" watchObservedRunningTime="2025-10-13 21:13:49.827527942 +0000 UTC m=+146.745773027" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.855388 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 21:13:49 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld Oct 13 21:13:49 crc kubenswrapper[4689]: [+]process-running ok Oct 13 21:13:49 crc kubenswrapper[4689]: healthz check failed Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.855442 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.856839 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" event={"ID":"42104006-5aa2-4e76-9a90-5402fb280c09","Type":"ContainerStarted","Data":"86a0b9b0581b3255f8f2bbbcb62761d3cfb4b94b39d5a31fac4d6e018e1ecf37"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.876711 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" podStartSLOduration=124.876683111 podStartE2EDuration="2m4.876683111s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:49.86860175 +0000 UTC m=+146.786846835" watchObservedRunningTime="2025-10-13 21:13:49.876683111 +0000 UTC m=+146.794928216" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.878406 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.883360 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.383330992 +0000 UTC m=+147.301576077 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.889221 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" event={"ID":"74959b1a-d8f4-415f-a901-4c5c162c41c9","Type":"ContainerStarted","Data":"ba6ee4eed0cc40aadbe36d93c8976161cb9e49427ddecbdd75fd4cb91a46cfb1"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.893040 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-9bdb5" podStartSLOduration=124.893020776 podStartE2EDuration="2m4.893020776s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:49.889772617 +0000 UTC m=+146.808017702" watchObservedRunningTime="2025-10-13 21:13:49.893020776 +0000 UTC m=+146.811265861" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.898084 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" event={"ID":"51d19dc5-9ac9-4b48-93ae-5d29535e1df6","Type":"ContainerStarted","Data":"4da7cec9005a3e9241eb5f48595bbf709bdded841d27876c9cb007ca7d7230e0"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.909263 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nlljk" event={"ID":"89108982-aec6-4c39-a675-508c22d2bf80","Type":"ContainerStarted","Data":"59e1edc6bc5f8543d807a6eafcb3ae637ce9cb4c2fad8078dcf9a611ddde89f9"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.915701 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" event={"ID":"a557b09e-dd8c-4ad4-8a24-d817c186d537","Type":"ContainerStarted","Data":"eeb6780153bc9a9d0641d9fb6f78044b18167b85f580c3b0a557a381a2eacd23"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.921962 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" event={"ID":"299e04b4-ceeb-4acb-bd40-516a2eef1486","Type":"ContainerStarted","Data":"18fd3ef550738e8eaafb2a7f5a0db7f67c79940f3f765abb3f496fbed0d80dbf"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.925252 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2hmcd" podStartSLOduration=124.925227013 podStartE2EDuration="2m4.925227013s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:49.923111145 +0000 UTC m=+146.841356230" watchObservedRunningTime="2025-10-13 21:13:49.925227013 +0000 UTC m=+146.843472098" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.950754 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" 
event={"ID":"2d213635-9d7d-4d87-9b11-6744bbac9824","Type":"ContainerStarted","Data":"f143d587a320ed0684ba011ca33b96d9bc7f4ae05e5d648b7d821aef186160f6"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.966927 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" event={"ID":"3220bc21-2d0b-42ac-8970-ce9e0d52d896","Type":"ContainerStarted","Data":"7cbf4b28235e1b4d5d456b6cbee7dd50ca08ab3a28e46ceb6217d3d5694f83ae"} Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.968019 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.971662 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-m4j9c" podStartSLOduration=124.971624347 podStartE2EDuration="2m4.971624347s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:49.968970804 +0000 UTC m=+146.887215889" watchObservedRunningTime="2025-10-13 21:13:49.971624347 +0000 UTC m=+146.889869432" Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.984086 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:49 crc kubenswrapper[4689]: E1013 21:13:49.988269 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.48825024 +0000 UTC m=+147.406495325 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:49 crc kubenswrapper[4689]: I1013 21:13:49.998620 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.016689 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2dd45" podStartSLOduration=125.016641283 podStartE2EDuration="2m5.016641283s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.008018738 +0000 UTC m=+146.926263823" watchObservedRunningTime="2025-10-13 21:13:50.016641283 +0000 UTC m=+146.934886368" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.038310 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" event={"ID":"8306e09d-864e-4342-adc2-e56288612d89","Type":"ContainerStarted","Data":"f0dfafbd9b9db3347e5d1c4d16180c0dbba2f862889d0d8b19542b7b8074fb04"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.056288 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" event={"ID":"1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54","Type":"ContainerStarted","Data":"fed45a5bbebf78e2f112c09170f2bcb767f463f7acf017a60a87847b43e66b38"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.102140 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.103337 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.603318554 +0000 UTC m=+147.521563629 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.106404 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s6x6p" podStartSLOduration=125.106388927 podStartE2EDuration="2m5.106388927s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.074821307 +0000 UTC m=+146.993066392" watchObservedRunningTime="2025-10-13 21:13:50.106388927 +0000 UTC m=+147.024634012" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.140846 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" event={"ID":"adcf30c3-b406-4735-8488-f8380a7ab1e8","Type":"ContainerStarted","Data":"fa213d5df372339337515534f7b79d672c63e08fb0c1f78a0f03a9e942ed568f"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.140913 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" event={"ID":"adcf30c3-b406-4735-8488-f8380a7ab1e8","Type":"ContainerStarted","Data":"7d168fa05a9e3aa6f0f7c3efc74abe9f72aa0a1c5370361b4d7d9166145ad35a"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.141824 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.144951 4689 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-q26mq container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.145017 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" podUID="adcf30c3-b406-4735-8488-f8380a7ab1e8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.155646 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8f84m" podStartSLOduration=125.155626568 podStartE2EDuration="2m5.155626568s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.154566499 +0000 UTC m=+147.072811584" watchObservedRunningTime="2025-10-13 21:13:50.155626568 +0000 UTC m=+147.073871653" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.177067 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" 
event={"ID":"69af70ca-2b63-4a93-86de-d9e5f44dffb3","Type":"ContainerStarted","Data":"d050dd445b87795983c546aa7a7c0dd0b8d391547faa396e2593177119be2305"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.193352 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nmkts" podStartSLOduration=125.193330495 podStartE2EDuration="2m5.193330495s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.189963984 +0000 UTC m=+147.108209079" watchObservedRunningTime="2025-10-13 21:13:50.193330495 +0000 UTC m=+147.111575580" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.206930 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.207339 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.707324996 +0000 UTC m=+147.625570081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.261637 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" event={"ID":"4442f39b-048e-493e-86b8-256c5c39dca5","Type":"ContainerStarted","Data":"88ea20d547ffb50e62ef26f8fe0bfae026ca5eeed08fafc8c8e84e0c121907a4"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.262647 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.280872 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vksbd" podStartSLOduration=125.280841669 podStartE2EDuration="2m5.280841669s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.233195292 +0000 UTC m=+147.151440377" watchObservedRunningTime="2025-10-13 21:13:50.280841669 +0000 UTC m=+147.199086754" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.281051 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" podStartSLOduration=125.281045575 podStartE2EDuration="2m5.281045575s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.268216676 +0000 UTC m=+147.186461761" watchObservedRunningTime="2025-10-13 21:13:50.281045575 +0000 UTC m=+147.199290660" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.296109 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" event={"ID":"62c53fad-57ac-4c62-86aa-f73e4e35b1f4","Type":"ContainerStarted","Data":"cbf69b2aebc182bb9237fb099da4904c439d49bc380d5c3ff2770472e256db07"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.314009 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.314446 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" event={"ID":"7fdfc234-e5da-413a-89e5-226f99fe29af","Type":"ContainerStarted","Data":"9660b486a2b916060a3c9f9004fd9e29004b3f7e6399ffaaa27c95f6d40e1774"} Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.315267 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.815240276 +0000 UTC m=+147.733485351 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.348284 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" event={"ID":"69b8063f-9b07-404d-91d8-e143fa0aa521","Type":"ContainerStarted","Data":"b7147048986e6d9096e4a19daebf39fb6c2645662a56db744069e5ef570cb2ff"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.349084 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.396779 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" event={"ID":"89f4bdbb-1a86-42b4-8067-7be1209a54cc","Type":"ContainerStarted","Data":"18d7a895e12ae5018c3fe41e63f99997997bd7db03af58165458a086b34b47bc"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.396836 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" event={"ID":"89f4bdbb-1a86-42b4-8067-7be1209a54cc","Type":"ContainerStarted","Data":"e320ecd81970a168ded20ae9ea86c151d7fcfbc45387df60f4b9b24bd079c8d9"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.414268 4689 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.415985 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" podStartSLOduration=125.415972329 podStartE2EDuration="2m5.415972329s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.35835319 +0000 UTC m=+147.276598285" watchObservedRunningTime="2025-10-13 21:13:50.415972329 +0000 UTC m=+147.334217414" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.416669 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.417034 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:50.917016488 +0000 UTC m=+147.835261573 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.432052 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" event={"ID":"79816a71-299c-4111-a2e5-930f6b05710e","Type":"ContainerStarted","Data":"9233e09b0533dce523defc8ccad0af7916fee929861aa1fbcee9d96f05f7433c"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.476648 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" event={"ID":"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0","Type":"ContainerStarted","Data":"7fdf7297d5d7ae0ada5aceef0462ef167be9af7759500605b4bbfcc5648c0388"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.503032 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" event={"ID":"ba56493b-6d33-4bee-bbb2-a431a7622cdb","Type":"ContainerStarted","Data":"e27bf1de7324e2e95e666c426679add995c2f273401051e0a0d1d365d2d120c3"} Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.517382 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.517597 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" podStartSLOduration=125.517556627 podStartE2EDuration="2m5.517556627s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.438943875 +0000 UTC m=+147.357188960" watchObservedRunningTime="2025-10-13 21:13:50.517556627 +0000 UTC m=+147.435801712" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.527284 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-nr7vc" Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.527819 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.027799796 +0000 UTC m=+147.946044881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.618189 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-znbxz" podStartSLOduration=126.618170517 podStartE2EDuration="2m6.618170517s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.612875083 +0000 UTC m=+147.531120168" watchObservedRunningTime="2025-10-13 21:13:50.618170517 +0000 UTC m=+147.536415602" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.620240 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9dxhv" podStartSLOduration=125.620214963 podStartE2EDuration="2m5.620214963s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.518619065 +0000 UTC m=+147.436864150" watchObservedRunningTime="2025-10-13 21:13:50.620214963 +0000 UTC m=+147.538460038" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.620734 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.621230 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.12121429 +0000 UTC m=+148.039459375 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.723658 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.724003 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.223983259 +0000 UTC m=+148.142228344 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.777637 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" podStartSLOduration=125.77761783 podStartE2EDuration="2m5.77761783s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.766571339 +0000 UTC m=+147.684816424" watchObservedRunningTime="2025-10-13 21:13:50.77761783 +0000 UTC m=+147.695862915" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.836512 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.837144 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.337115261 +0000 UTC m=+148.255360536 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.846910 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 21:13:50 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld Oct 13 21:13:50 crc kubenswrapper[4689]: [+]process-running ok Oct 13 21:13:50 crc kubenswrapper[4689]: healthz check failed Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.846987 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.898858 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-skz8g" podStartSLOduration=126.898838362 podStartE2EDuration="2m6.898838362s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.895186152 +0000 UTC m=+147.813431237" watchObservedRunningTime="2025-10-13 21:13:50.898838362 +0000 UTC m=+147.817083447" Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.939229 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:50 crc kubenswrapper[4689]: E1013 21:13:50.939640 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.439621463 +0000 UTC m=+148.357866548 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:50 crc kubenswrapper[4689]: I1013 21:13:50.948144 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cm8l7" podStartSLOduration=125.948124404 podStartE2EDuration="2m5.948124404s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:50.946034278 +0000 UTC m=+147.864279363" watchObservedRunningTime="2025-10-13 21:13:50.948124404 +0000 UTC m=+147.866369489" Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.040415 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.041021 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.540998474 +0000 UTC m=+148.459243689 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.142725 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.143487 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.643458555 +0000 UTC m=+148.561703640 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.244736 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.245260 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.745237007 +0000 UTC m=+148.663482092 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.346025 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.346284 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.846238988 +0000 UTC m=+148.764484063 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.346420 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.346844 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.846834434 +0000 UTC m=+148.765079709 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.447971 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.448190 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.948151204 +0000 UTC m=+148.866396289 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.448302 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.448689 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:51.948675009 +0000 UTC m=+148.866920094 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.508034 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" event={"ID":"4442f39b-048e-493e-86b8-256c5c39dca5","Type":"ContainerStarted","Data":"b561d3e6bd4294022fc3f0e83c50d044d5a1c3cce62b70e19c78d6bb89755245"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.509767 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" event={"ID":"5a4ca39a-9c29-4904-aa12-cca8aff1453a","Type":"ContainerStarted","Data":"3f9958ac1c8e2bf558fb304fc6d9b7cd557f0e781efdeb01ba7c73cf1485b3e4"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.512632 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" event={"ID":"f762bf8c-9be0-4bbe-a2ec-66f701aa99b0","Type":"ContainerStarted","Data":"a464f9b54e1c307468bd11f2bbdc9890743175ee4f76f2836e8162302ee4373f"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.514416 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" event={"ID":"110831e6-2732-4fe8-8f36-3505ff66495f","Type":"ContainerStarted","Data":"ed498f5980b5d488634ddbb6b02ce6040f796ac24d61e8362987162f85eaef90"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.516773 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" event={"ID":"a557b09e-dd8c-4ad4-8a24-d817c186d537","Type":"ContainerStarted","Data":"2df8e086ce4dd64133a21b8bc80b6c6d827fe2dec20f950e2cb193511a6b0eb1"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.518792 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dsbhf" event={"ID":"79816a71-299c-4111-a2e5-930f6b05710e","Type":"ContainerStarted","Data":"3e2b8f16f4fb54755937222830ec5e1a96ba64c79a29b5e7da3f41014f446223"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.521034 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" event={"ID":"2d213635-9d7d-4d87-9b11-6744bbac9824","Type":"ContainerStarted","Data":"94d3063fc29960677723405af3517e402b93b30ebcb829d6dcec59d3bd1c8b95"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.523622 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-fw54p" event={"ID":"99ddc02a-f527-40a5-a6dd-330e712e955d","Type":"ContainerStarted","Data":"37cf7007895f111605aa715f1ff66f9eca07fb9f110c0254216ba37be64d45b9"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.523654 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-fw54p" event={"ID":"99ddc02a-f527-40a5-a6dd-330e712e955d","Type":"ContainerStarted","Data":"5b621079e5553bd4d0de04aa0bd7b076662f99c9ff5a0c572e2746f90a23f02b"}
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.524737 4689 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mql5t container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body=
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.524804 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" podUID="d3229a49-3424-464a-b479-460fb0a21620" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.550068 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.550276 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.050238725 +0000 UTC m=+148.968483810 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.550817 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.550883 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.551200 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.556502 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.056476545 +0000 UTC m=+148.974721700 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.556829 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.558317 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.661217 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.661745 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.161702541 +0000 UTC m=+149.079947636 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.665564 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.665965 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.667136 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.167116009 +0000 UTC m=+149.085361094 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.667474 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.676698 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.677198 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" podStartSLOduration=127.677180542 podStartE2EDuration="2m7.677180542s" podCreationTimestamp="2025-10-13 21:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:51.621769963 +0000 UTC m=+148.540015048" watchObservedRunningTime="2025-10-13 21:13:51.677180542 +0000 UTC m=+148.595425627"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.677869 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.687968 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.723484 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" podStartSLOduration=126.723460963 podStartE2EDuration="2m6.723460963s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:51.680626636 +0000 UTC m=+148.598871721" watchObservedRunningTime="2025-10-13 21:13:51.723460963 +0000 UTC m=+148.641706058"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.724174 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-qtbn4" podStartSLOduration=126.724168682 podStartE2EDuration="2m6.724168682s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:51.719051113 +0000 UTC m=+148.637296218" watchObservedRunningTime="2025-10-13 21:13:51.724168682 +0000 UTC m=+148.642413767"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.759082 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jvx7z"]
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.760124 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.769638 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.769964 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.269923208 +0000 UTC m=+149.188168303 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.770542 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.772446 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.272431467 +0000 UTC m=+149.190676552 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.810504 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jvx7z"]
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.853214 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 13 21:13:51 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld
Oct 13 21:13:51 crc kubenswrapper[4689]: [+]process-running ok
Oct 13 21:13:51 crc kubenswrapper[4689]: healthz check failed
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.853775 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.873332 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.876422 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.376396648 +0000 UTC m=+149.294641733 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.876813 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-utilities\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.876932 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.877329 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-catalog-content\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.877422 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sptv4\" (UniqueName: \"kubernetes.io/projected/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-kube-api-access-sptv4\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.877857 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.377847908 +0000 UTC m=+149.296092993 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.910954 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.923087 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.933194 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.980439 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.980781 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-catalog-content\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.980811 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sptv4\" (UniqueName: \"kubernetes.io/projected/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-kube-api-access-sptv4\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.980852 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-utilities\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.981272 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-utilities\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.982553 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-6sjk2" podStartSLOduration=126.98254148 podStartE2EDuration="2m6.98254148s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:51.952936174 +0000 UTC m=+148.871181259" watchObservedRunningTime="2025-10-13 21:13:51.98254148 +0000 UTC m=+148.900786565"
Oct 13 21:13:51 crc kubenswrapper[4689]: I1013 21:13:51.986848 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-catalog-content\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:51 crc kubenswrapper[4689]: E1013 21:13:51.986951 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.486931979 +0000 UTC m=+149.405177064 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.003846 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-fw54p"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.033083 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bk9hg"]
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.058201 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.082566 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.082664 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-catalog-content\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.082694 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-utilities\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.082774 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62spr\" (UniqueName: \"kubernetes.io/projected/d02f5e12-ab55-4649-94db-f248e569c2d1-kube-api-access-62spr\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.083187 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.583173291 +0000 UTC m=+149.501418376 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.121555 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-fw54p" podStartSLOduration=10.121530035 podStartE2EDuration="10.121530035s" podCreationTimestamp="2025-10-13 21:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:52.079638784 +0000 UTC m=+148.997883889" watchObservedRunningTime="2025-10-13 21:13:52.121530035 +0000 UTC m=+149.039775120"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.130390 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bk9hg"]
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.133311 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.134047 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sptv4\" (UniqueName: \"kubernetes.io/projected/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-kube-api-access-sptv4\") pod \"community-operators-jvx7z\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") " pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.194086 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.194696 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62spr\" (UniqueName: \"kubernetes.io/projected/d02f5e12-ab55-4649-94db-f248e569c2d1-kube-api-access-62spr\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.194778 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-catalog-content\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.194802 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-utilities\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.195432 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-catalog-content\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.195532 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.695429158 +0000 UTC m=+149.613674243 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.208198 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-utilities\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.245675 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nmrjr"]
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.247458 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.280026 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nmrjr"]
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.298518 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62spr\" (UniqueName: \"kubernetes.io/projected/d02f5e12-ab55-4649-94db-f248e569c2d1-kube-api-access-62spr\") pod \"certified-operators-bk9hg\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.306814 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ml5l\" (UniqueName: \"kubernetes.io/projected/d6cf5abf-3312-44a9-a2e8-4f11711fce76-kube-api-access-5ml5l\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.307166 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-utilities\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.307227 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-catalog-content\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.307308 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.307762 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.807744227 +0000 UTC m=+149.725989312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.372201 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h2rlr"]
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.373303 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.384893 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jvx7z"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.411674 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.411907 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-utilities\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.411956 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-utilities\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.411979 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-catalog-content\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.412011 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2h9q\" (UniqueName: \"kubernetes.io/projected/5d6125f5-c788-40f7-9d1d-9e28239be338-kube-api-access-z2h9q\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.412038 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-catalog-content\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.412069 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ml5l\" (UniqueName: \"kubernetes.io/projected/d6cf5abf-3312-44a9-a2e8-4f11711fce76-kube-api-access-5ml5l\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.412527 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:52.912508981 +0000 UTC m=+149.830754066 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.412969 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-utilities\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.413189 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-catalog-content\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.467732 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h2rlr"]
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.490615 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ml5l\" (UniqueName: \"kubernetes.io/projected/d6cf5abf-3312-44a9-a2e8-4f11711fce76-kube-api-access-5ml5l\") pod \"community-operators-nmrjr\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.514460 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-utilities\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.514530 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2h9q\" (UniqueName: \"kubernetes.io/projected/5d6125f5-c788-40f7-9d1d-9e28239be338-kube-api-access-z2h9q\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.514558 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.514577 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-catalog-content\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.515007 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-utilities\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.515500 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.015481146 +0000 UTC m=+149.933726231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.516197 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-catalog-content\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.521560 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bk9hg"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.525744 4689 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-q26mq container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.525824 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq" podUID="adcf30c3-b406-4735-8488-f8380a7ab1e8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.547157 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c3886e6d3e0e5986ae2c30511a4dff85ae5f7fe560dad339c472019168a9f28a"}
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.558436 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2h9q\" (UniqueName: \"kubernetes.io/projected/5d6125f5-c788-40f7-9d1d-9e28239be338-kube-api-access-z2h9q\") pod \"certified-operators-h2rlr\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.559537 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" event={"ID":"5a4ca39a-9c29-4904-aa12-cca8aff1453a","Type":"ContainerStarted","Data":"5f9cc8d8ccea1a5a30a12cadaf1e604443f736eb91b0dda528ae312b51b941f8"}
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.562444 4689 generic.go:334] "Generic (PLEG): container finished" podID="62c53fad-57ac-4c62-86aa-f73e4e35b1f4" containerID="cbf69b2aebc182bb9237fb099da4904c439d49bc380d5c3ff2770472e256db07" exitCode=0
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.563230 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" event={"ID":"62c53fad-57ac-4c62-86aa-f73e4e35b1f4","Type":"ContainerDied","Data":"cbf69b2aebc182bb9237fb099da4904c439d49bc380d5c3ff2770472e256db07"}
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.595039 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.618893 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.620390 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.120360732 +0000 UTC m=+150.038605817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.624154 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nmrjr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.702399 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h2rlr"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.732819 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.733191 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.233175745 +0000 UTC m=+150.151420830 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.833951 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.834763 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.334744622 +0000 UTC m=+150.252989707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.855241 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 13 21:13:52 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld
Oct 13 21:13:52 crc kubenswrapper[4689]: [+]process-running ok
Oct 13 21:13:52 crc kubenswrapper[4689]: healthz check failed
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.855310 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 13 21:13:52 crc kubenswrapper[4689]: I1013 21:13:52.938448 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:52 crc kubenswrapper[4689]: E1013 21:13:52.938862 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.438847818 +0000 UTC m=+150.357092903 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.039799 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.040292 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.540246999 +0000 UTC m=+150.458492104 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.071067 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-q26mq"
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.151722 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.152136 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.652121806 +0000 UTC m=+150.570366891 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.257681 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.258083 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.758064652 +0000 UTC m=+150.676309737 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.331677 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bk9hg"]
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.359198 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.361354 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.861331985 +0000 UTC m=+150.779577070 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.412203 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jvx7z"]
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.463716 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.466847 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.966822628 +0000 UTC m=+150.885067713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.466937 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp"
Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.468145 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:53.968135474 +0000 UTC m=+150.886380559 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.525332 4689 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.586007 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" event={"ID":"5a4ca39a-9c29-4904-aa12-cca8aff1453a","Type":"ContainerStarted","Data":"7ebb5681792f4253c5aa5fe0ced9bb97ac81aa37da84ba1d2b2adf7ee49a273e"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.586842 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.587485 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:54.087466684 +0000 UTC m=+151.005711769 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.598301 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jvx7z" event={"ID":"0ce9ebc5-b00b-4f83-8420-6a58b073efa5","Type":"ContainerStarted","Data":"773c4bb0ad1b9d3251f66979be419292d9554020e445154a0c872187fffe3a8f"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.615039 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk9hg" event={"ID":"d02f5e12-ab55-4649-94db-f248e569c2d1","Type":"ContainerStarted","Data":"c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.615120 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk9hg" event={"ID":"d02f5e12-ab55-4649-94db-f248e569c2d1","Type":"ContainerStarted","Data":"5b459ae86c7554e28a826b3467d0668cdeda27f7c40831775d24cea5a319af2f"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.618890 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3986fe3271bcc35d7d8e772263589d04675dee08f8e82cabe73b418d8ad11399"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.641915 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"4f9accf164d8baa4adbac44f62b905f572976984f95c85a954a7bce9a676f0c5"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.641991 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a464bf7340e42093735b1297eea945f50d2ca888e7fe4eb833034f1bc5de832d"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.653992 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6422f78733835b4bfe79037648b85edd75b37ae100553948a74d302aeed72ea1"} Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.683226 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h2rlr"] Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.691957 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.692312 4689 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:54.19229734 +0000 UTC m=+151.110542425 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.723032 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nxrzk"] Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.724220 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.729135 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.733252 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nxrzk"] Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.792745 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.794234 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:54.294212196 +0000 UTC m=+151.212457281 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.798188 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nmrjr"] Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.844950 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 21:13:53 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld Oct 13 21:13:53 crc kubenswrapper[4689]: [+]process-running ok Oct 13 21:13:53 crc kubenswrapper[4689]: healthz check failed Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.845026 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.860570 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.860641 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.882737 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tckgt" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.897333 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.897387 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnbhw\" (UniqueName: \"kubernetes.io/projected/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-kube-api-access-qnbhw\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.897411 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-utilities\") 
pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.897430 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-catalog-content\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.898019 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-13 21:13:54.398003953 +0000 UTC m=+151.316249048 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-m87tp" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.998218 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.998548 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnbhw\" (UniqueName: \"kubernetes.io/projected/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-kube-api-access-qnbhw\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.998580 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-utilities\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.998663 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-catalog-content\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: E1013 21:13:53.998874 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-13 21:13:54.49884671 +0000 UTC m=+151.417091795 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.999164 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-catalog-content\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:53 crc kubenswrapper[4689]: I1013 21:13:53.999456 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-utilities\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.004776 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.037535 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnbhw\" (UniqueName: \"kubernetes.io/projected/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-kube-api-access-qnbhw\") pod \"redhat-marketplace-nxrzk\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.061887 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.074042 4689 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-13T21:13:53.525368273Z","Handler":null,"Name":""} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.078239 4689 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.078291 4689 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.101691 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pxhk\" (UniqueName: \"kubernetes.io/projected/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-kube-api-access-6pxhk\") pod \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.101961 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-secret-volume\") pod \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.102083 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-config-volume\") pod \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\" (UID: \"62c53fad-57ac-4c62-86aa-f73e4e35b1f4\") " Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.102285 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.104083 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-config-volume" (OuterVolumeSpecName: "config-volume") pod "62c53fad-57ac-4c62-86aa-f73e4e35b1f4" (UID: "62c53fad-57ac-4c62-86aa-f73e4e35b1f4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.110217 4689 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.110276 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.112422 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "62c53fad-57ac-4c62-86aa-f73e4e35b1f4" (UID: "62c53fad-57ac-4c62-86aa-f73e4e35b1f4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.115712 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w2hcj"] Oct 13 21:13:54 crc kubenswrapper[4689]: E1013 21:13:54.115991 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c53fad-57ac-4c62-86aa-f73e4e35b1f4" containerName="collect-profiles" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.116009 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c53fad-57ac-4c62-86aa-f73e4e35b1f4" containerName="collect-profiles" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.116159 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c53fad-57ac-4c62-86aa-f73e4e35b1f4" containerName="collect-profiles" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.117173 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.119409 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-kube-api-access-6pxhk" (OuterVolumeSpecName: "kube-api-access-6pxhk") pod "62c53fad-57ac-4c62-86aa-f73e4e35b1f4" (UID: "62c53fad-57ac-4c62-86aa-f73e4e35b1f4"). InnerVolumeSpecName "kube-api-access-6pxhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.124445 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w2hcj"] Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.142446 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-m87tp\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.151495 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.203855 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.204159 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-utilities\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.204257 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-catalog-content\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.204301 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z294\" (UniqueName: \"kubernetes.io/projected/4940641e-0604-4623-80d6-62fbc4187027-kube-api-access-4z294\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.204372 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pxhk\" (UniqueName: \"kubernetes.io/projected/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-kube-api-access-6pxhk\") on node \"crc\" DevicePath \"\"" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.204389 4689 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.204402 4689 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/62c53fad-57ac-4c62-86aa-f73e4e35b1f4-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.216680 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.306010 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-catalog-content\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.306070 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z294\" (UniqueName: \"kubernetes.io/projected/4940641e-0604-4623-80d6-62fbc4187027-kube-api-access-4z294\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.306124 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-utilities\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.306668 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-catalog-content\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.306730 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-utilities\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.329374 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z294\" (UniqueName: \"kubernetes.io/projected/4940641e-0604-4623-80d6-62fbc4187027-kube-api-access-4z294\") pod \"redhat-marketplace-w2hcj\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.330629 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nxrzk"] Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.443848 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m87tp"] Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.534635 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.659973 4689 generic.go:334] "Generic (PLEG): container finished" podID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerID="1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af" exitCode=0 Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.660019 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nmrjr" event={"ID":"d6cf5abf-3312-44a9-a2e8-4f11711fce76","Type":"ContainerDied","Data":"1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.660432 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nmrjr" event={"ID":"d6cf5abf-3312-44a9-a2e8-4f11711fce76","Type":"ContainerStarted","Data":"30b24c642e0072f9a794732b028cad7bf2c06e63f4821b1696ca5215f9662acb"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.662281 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.662608 4689 generic.go:334] "Generic (PLEG): container finished" podID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerID="71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6" exitCode=0 Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.662687 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2rlr" event={"ID":"5d6125f5-c788-40f7-9d1d-9e28239be338","Type":"ContainerDied","Data":"71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.662716 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2rlr" event={"ID":"5d6125f5-c788-40f7-9d1d-9e28239be338","Type":"ContainerStarted","Data":"146760d6720c978289c242d011b91beb4f1f8bcf10b6db209b7fb7228b5d790c"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.673705 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" event={"ID":"5a4ca39a-9c29-4904-aa12-cca8aff1453a","Type":"ContainerStarted","Data":"063758ec538d6f88bb3d6b4567e9fad4c4dc32dcbcc39c1f09473659b2117f2d"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.679227 4689 generic.go:334] "Generic (PLEG): container finished" podID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerID="b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae" exitCode=0 Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.679262 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jvx7z" event={"ID":"0ce9ebc5-b00b-4f83-8420-6a58b073efa5","Type":"ContainerDied","Data":"b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.684011 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" event={"ID":"2c78b93f-8347-4c41-a948-bacab534efdf","Type":"ContainerStarted","Data":"8bea0beac10e5a775a987bdbca0b7dba803b39c4d2439732caed884c66672cbf"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.686958 4689 generic.go:334] "Generic (PLEG): container finished" podID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerID="fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594" exitCode=0 Oct 13 21:13:54 crc 
kubenswrapper[4689]: I1013 21:13:54.687029 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nxrzk" event={"ID":"0dabb64b-b7fc-4428-bcfe-10c98dbe797b","Type":"ContainerDied","Data":"fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.687065 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nxrzk" event={"ID":"0dabb64b-b7fc-4428-bcfe-10c98dbe797b","Type":"ContainerStarted","Data":"0aa98261fad351cee85a8aeb3168224d50f2f0427dcc62f18121adbe83715093"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.690265 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" event={"ID":"62c53fad-57ac-4c62-86aa-f73e4e35b1f4","Type":"ContainerDied","Data":"9405df9ec5b92c8a61815aac6017fe3545f89cf18bc51e8370c6c1c6aaa81610"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.690392 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9405df9ec5b92c8a61815aac6017fe3545f89cf18bc51e8370c6c1c6aaa81610" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.690533 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.695868 4689 generic.go:334] "Generic (PLEG): container finished" podID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerID="c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314" exitCode=0 Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.695950 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk9hg" event={"ID":"d02f5e12-ab55-4649-94db-f248e569c2d1","Type":"ContainerDied","Data":"c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.702052 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"017dbe140bce5aa98053c6af120ed0ff102f97aec7e6eab864ecb56f24975e78"} Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.824674 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-lbpft" podStartSLOduration=12.824651263 podStartE2EDuration="12.824651263s" podCreationTimestamp="2025-10-13 21:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:54.718371298 +0000 UTC m=+151.636616383" watchObservedRunningTime="2025-10-13 21:13:54.824651263 +0000 UTC m=+151.742896368" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.846471 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 21:13:54 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld Oct 13 21:13:54 crc kubenswrapper[4689]: [+]process-running ok Oct 13 21:13:54 crc kubenswrapper[4689]: healthz check failed Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.846547 4689 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.867606 4689 patch_prober.go:28] interesting pod/downloads-7954f5f757-xsgbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.867654 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-xsgbv" podUID="1f429fdd-330f-4526-9fcd-fb6293286256" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.867791 4689 patch_prober.go:28] interesting pod/downloads-7954f5f757-xsgbv container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.867848 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-xsgbv" podUID="1f429fdd-330f-4526-9fcd-fb6293286256" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.895269 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.895824 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.897194 4689 patch_prober.go:28] interesting pod/console-f9d7485db-z5vx4 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.897255 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-z5vx4" podUID="0d4c9845-75c1-43df-b20c-2e90d4830d84" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Oct 13 21:13:54 crc kubenswrapper[4689]: I1013 21:13:54.974111 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w2hcj"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.116825 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-smc4t"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.117972 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.119671 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.125849 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-smc4t"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.225461 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-catalog-content\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.225551 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh74k\" (UniqueName: \"kubernetes.io/projected/8df17524-724f-469d-a215-26d230f8d2ce-kube-api-access-fh74k\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.225605 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-utilities\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.326936 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-catalog-content\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.327047 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh74k\" (UniqueName: \"kubernetes.io/projected/8df17524-724f-469d-a215-26d230f8d2ce-kube-api-access-fh74k\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.327085 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-utilities\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.328226 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-utilities\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.328298 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-catalog-content\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " 
pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.352411 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh74k\" (UniqueName: \"kubernetes.io/projected/8df17524-724f-469d-a215-26d230f8d2ce-kube-api-access-fh74k\") pod \"redhat-operators-smc4t\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.374327 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.374386 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.387701 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.436084 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.512009 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.512077 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.515964 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.516782 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.519660 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.520109 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.521460 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hlhzr"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.522542 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.523698 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.528893 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.554790 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hlhzr"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.631110 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-catalog-content\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.633916 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mqjk\" (UniqueName: \"kubernetes.io/projected/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-kube-api-access-5mqjk\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.633988 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/20d37033-06c5-4468-bfb1-35459bf71b15-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.634018 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/20d37033-06c5-4468-bfb1-35459bf71b15-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.634546 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-utilities\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.718620 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" event={"ID":"2c78b93f-8347-4c41-a948-bacab534efdf","Type":"ContainerStarted","Data":"4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9"} Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.718793 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.723273 4689 generic.go:334] "Generic (PLEG): container finished" podID="4940641e-0604-4623-80d6-62fbc4187027" containerID="600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4" exitCode=0 Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.723435 4689 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w2hcj" event={"ID":"4940641e-0604-4623-80d6-62fbc4187027","Type":"ContainerDied","Data":"600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4"} Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.723478 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w2hcj" event={"ID":"4940641e-0604-4623-80d6-62fbc4187027","Type":"ContainerStarted","Data":"ea205f01a45c0185a0a5c205ab9802552f8c82a22efe5621c1d7cba54a177da3"} Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.725342 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.737234 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-utilities\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.737303 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-catalog-content\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.737343 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mqjk\" (UniqueName: \"kubernetes.io/projected/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-kube-api-access-5mqjk\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.737368 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/20d37033-06c5-4468-bfb1-35459bf71b15-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.737385 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/20d37033-06c5-4468-bfb1-35459bf71b15-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.737452 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/20d37033-06c5-4468-bfb1-35459bf71b15-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.738321 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" podStartSLOduration=130.738311949 podStartE2EDuration="2m10.738311949s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-13 21:13:55.737860677 +0000 UTC m=+152.656105762" watchObservedRunningTime="2025-10-13 21:13:55.738311949 +0000 UTC m=+152.656557034" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.739343 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-catalog-content\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.740254 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-j5nvb" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.739224 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-utilities\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.747726 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvrs7" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.754079 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-smc4t"] Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.759968 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mqjk\" (UniqueName: \"kubernetes.io/projected/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-kube-api-access-5mqjk\") pod \"redhat-operators-hlhzr\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") " pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.760328 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/20d37033-06c5-4468-bfb1-35459bf71b15-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.839094 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.840733 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.886404 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 21:13:55 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld Oct 13 21:13:55 crc kubenswrapper[4689]: [+]process-running ok Oct 13 21:13:55 crc kubenswrapper[4689]: healthz check failed Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.893247 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.895439 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 21:13:55 crc kubenswrapper[4689]: I1013 21:13:55.961778 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.406715 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hlhzr"] Oct 13 21:13:56 crc kubenswrapper[4689]: W1013 21:13:56.430474 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3c3a8ce_5574_4dd5_acff_b56b6c188b44.slice/crio-c99637177f768aed123e2403a60a18bfcbcaf900c0ad46bb90a6584f20d78a66 WatchSource:0}: Error finding container c99637177f768aed123e2403a60a18bfcbcaf900c0ad46bb90a6584f20d78a66: Status 404 returned error can't find the container with id c99637177f768aed123e2403a60a18bfcbcaf900c0ad46bb90a6584f20d78a66 Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.538168 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 13 21:13:56 crc kubenswrapper[4689]: W1013 21:13:56.587644 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod20d37033_06c5_4468_bfb1_35459bf71b15.slice/crio-bbdd0922e64286b2a0264bfee7fe82a7d947f49dc4d7b5b32bcbfd485383de8e WatchSource:0}: Error finding container bbdd0922e64286b2a0264bfee7fe82a7d947f49dc4d7b5b32bcbfd485383de8e: Status 404 returned error can't find the container with id bbdd0922e64286b2a0264bfee7fe82a7d947f49dc4d7b5b32bcbfd485383de8e Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.751465 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"20d37033-06c5-4468-bfb1-35459bf71b15","Type":"ContainerStarted","Data":"bbdd0922e64286b2a0264bfee7fe82a7d947f49dc4d7b5b32bcbfd485383de8e"} Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.754935 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlhzr" event={"ID":"d3c3a8ce-5574-4dd5-acff-b56b6c188b44","Type":"ContainerStarted","Data":"c99637177f768aed123e2403a60a18bfcbcaf900c0ad46bb90a6584f20d78a66"} Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.766506 4689 generic.go:334] "Generic (PLEG): container finished" podID="8df17524-724f-469d-a215-26d230f8d2ce" containerID="a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36" exitCode=0 Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.766633 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smc4t" event={"ID":"8df17524-724f-469d-a215-26d230f8d2ce","Type":"ContainerDied","Data":"a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36"} Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.766692 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smc4t" 
event={"ID":"8df17524-724f-469d-a215-26d230f8d2ce","Type":"ContainerStarted","Data":"cebd5db9fd2258ec62b0c02c47d36b01fd3bc427fbaa8fb4cffec66400ea0c86"} Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.844226 4689 patch_prober.go:28] interesting pod/router-default-5444994796-nlljk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 13 21:13:56 crc kubenswrapper[4689]: [-]has-synced failed: reason withheld Oct 13 21:13:56 crc kubenswrapper[4689]: [+]process-running ok Oct 13 21:13:56 crc kubenswrapper[4689]: healthz check failed Oct 13 21:13:56 crc kubenswrapper[4689]: I1013 21:13:56.844312 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nlljk" podUID="89108982-aec6-4c39-a675-508c22d2bf80" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.788676 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"20d37033-06c5-4468-bfb1-35459bf71b15","Type":"ContainerStarted","Data":"fbc24277249a5b51852a00997d5131d2ad2b500bb1b0dd6249af5ba6508a6d22"} Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.802045 4689 generic.go:334] "Generic (PLEG): container finished" podID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerID="05f40238f2a06a34715c9b261fc3d49a62e8e3ba1b56095705176090fba35c54" exitCode=0 Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.802119 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlhzr" event={"ID":"d3c3a8ce-5574-4dd5-acff-b56b6c188b44","Type":"ContainerDied","Data":"05f40238f2a06a34715c9b261fc3d49a62e8e3ba1b56095705176090fba35c54"} Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.811911 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.8118859990000002 podStartE2EDuration="2.811885999s" podCreationTimestamp="2025-10-13 21:13:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:13:57.806073781 +0000 UTC m=+154.724318866" watchObservedRunningTime="2025-10-13 21:13:57.811885999 +0000 UTC m=+154.730131084" Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.846826 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.852991 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-nlljk" Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.991369 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.993513 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.997479 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 13 21:13:57 crc kubenswrapper[4689]: I1013 21:13:57.997742 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.001648 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.121505 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.121601 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.248739 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.248896 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.248975 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.276326 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.323465 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.814457 4689 generic.go:334] "Generic (PLEG): container finished" podID="20d37033-06c5-4468-bfb1-35459bf71b15" containerID="fbc24277249a5b51852a00997d5131d2ad2b500bb1b0dd6249af5ba6508a6d22" exitCode=0 Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.814604 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"20d37033-06c5-4468-bfb1-35459bf71b15","Type":"ContainerDied","Data":"fbc24277249a5b51852a00997d5131d2ad2b500bb1b0dd6249af5ba6508a6d22"} Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.834136 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:13:58 crc kubenswrapper[4689]: I1013 21:13:58.905324 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 13 21:13:58 crc kubenswrapper[4689]: W1013 21:13:58.961438 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod915ec69a_0839_4a93_9d1e_d79ae5620ae2.slice/crio-cf4b70d30a63370a9f9d77fcc741858fdf14944ed3a815cfe9b38523fda4354f WatchSource:0}: Error finding container cf4b70d30a63370a9f9d77fcc741858fdf14944ed3a815cfe9b38523fda4354f: Status 404 returned error can't find the container with id cf4b70d30a63370a9f9d77fcc741858fdf14944ed3a815cfe9b38523fda4354f Oct 13 21:13:59 crc kubenswrapper[4689]: I1013 21:13:59.839172 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"915ec69a-0839-4a93-9d1e-d79ae5620ae2","Type":"ContainerStarted","Data":"cf4b70d30a63370a9f9d77fcc741858fdf14944ed3a815cfe9b38523fda4354f"} Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.426534 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.496214 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/20d37033-06c5-4468-bfb1-35459bf71b15-kubelet-dir\") pod \"20d37033-06c5-4468-bfb1-35459bf71b15\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.496303 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/20d37033-06c5-4468-bfb1-35459bf71b15-kube-api-access\") pod \"20d37033-06c5-4468-bfb1-35459bf71b15\" (UID: \"20d37033-06c5-4468-bfb1-35459bf71b15\") " Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.497437 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/20d37033-06c5-4468-bfb1-35459bf71b15-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "20d37033-06c5-4468-bfb1-35459bf71b15" (UID: "20d37033-06c5-4468-bfb1-35459bf71b15"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.504940 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20d37033-06c5-4468-bfb1-35459bf71b15-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "20d37033-06c5-4468-bfb1-35459bf71b15" (UID: "20d37033-06c5-4468-bfb1-35459bf71b15"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.598208 4689 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/20d37033-06c5-4468-bfb1-35459bf71b15-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.598257 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/20d37033-06c5-4468-bfb1-35459bf71b15-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.869716 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"915ec69a-0839-4a93-9d1e-d79ae5620ae2","Type":"ContainerDied","Data":"e59d3e3f76571604fb6b1c33a6b2e9c6772362f727f65800612f3f989e539be1"} Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.869547 4689 generic.go:334] "Generic (PLEG): container finished" podID="915ec69a-0839-4a93-9d1e-d79ae5620ae2" containerID="e59d3e3f76571604fb6b1c33a6b2e9c6772362f727f65800612f3f989e539be1" exitCode=0 Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.906127 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"20d37033-06c5-4468-bfb1-35459bf71b15","Type":"ContainerDied","Data":"bbdd0922e64286b2a0264bfee7fe82a7d947f49dc4d7b5b32bcbfd485383de8e"} Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.906177 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 13 21:14:00 crc kubenswrapper[4689]: I1013 21:14:00.906198 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbdd0922e64286b2a0264bfee7fe82a7d947f49dc4d7b5b32bcbfd485383de8e" Oct 13 21:14:01 crc kubenswrapper[4689]: I1013 21:14:01.003630 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-fw54p" Oct 13 21:14:04 crc kubenswrapper[4689]: I1013 21:14:04.875536 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-xsgbv" Oct 13 21:14:04 crc kubenswrapper[4689]: I1013 21:14:04.895222 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:14:04 crc kubenswrapper[4689]: I1013 21:14:04.900004 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:14:07 crc kubenswrapper[4689]: I1013 21:14:07.923916 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:14:07 crc kubenswrapper[4689]: I1013 21:14:07.930279 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f5bb2ee-abeb-4342-929a-d61e89f30351-metrics-certs\") pod \"network-metrics-daemon-nffnw\" (UID: \"3f5bb2ee-abeb-4342-929a-d61e89f30351\") " pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.094600 4689 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nffnw" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.612627 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.737064 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kubelet-dir\") pod \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.737163 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kube-api-access\") pod \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\" (UID: \"915ec69a-0839-4a93-9d1e-d79ae5620ae2\") " Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.737263 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "915ec69a-0839-4a93-9d1e-d79ae5620ae2" (UID: "915ec69a-0839-4a93-9d1e-d79ae5620ae2"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.737551 4689 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.750184 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "915ec69a-0839-4a93-9d1e-d79ae5620ae2" (UID: "915ec69a-0839-4a93-9d1e-d79ae5620ae2"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.838900 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/915ec69a-0839-4a93-9d1e-d79ae5620ae2-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.973043 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"915ec69a-0839-4a93-9d1e-d79ae5620ae2","Type":"ContainerDied","Data":"cf4b70d30a63370a9f9d77fcc741858fdf14944ed3a815cfe9b38523fda4354f"} Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.973131 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf4b70d30a63370a9f9d77fcc741858fdf14944ed3a815cfe9b38523fda4354f" Oct 13 21:14:08 crc kubenswrapper[4689]: I1013 21:14:08.973167 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 13 21:14:14 crc kubenswrapper[4689]: I1013 21:14:14.157491 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:14:23 crc kubenswrapper[4689]: I1013 21:14:23.859316 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:14:23 crc kubenswrapper[4689]: I1013 21:14:23.860075 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:14:25 crc kubenswrapper[4689]: E1013 21:14:25.444546 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 13 21:14:25 crc kubenswrapper[4689]: E1013 21:14:25.445642 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4z294,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-w2hcj_openshift-marketplace(4940641e-0604-4623-80d6-62fbc4187027): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 13 21:14:25 crc kubenswrapper[4689]: E1013 21:14:25.446867 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openshift-marketplace/redhat-marketplace-w2hcj" podUID="4940641e-0604-4623-80d6-62fbc4187027" Oct 13 21:14:25 crc kubenswrapper[4689]: I1013 21:14:25.914135 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2kfvc" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.160857 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-w2hcj" podUID="4940641e-0604-4623-80d6-62fbc4187027" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.261130 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.261332 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fh74k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-smc4t_openshift-marketplace(8df17524-724f-469d-a215-26d230f8d2ce): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.262720 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-smc4t" podUID="8df17524-724f-469d-a215-26d230f8d2ce" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.378337 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.378545 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5mqjk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-hlhzr_openshift-marketplace(d3c3a8ce-5574-4dd5-acff-b56b6c188b44): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.379768 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-hlhzr" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.393499 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.393762 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qnbhw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-nxrzk_openshift-marketplace(0dabb64b-b7fc-4428-bcfe-10c98dbe797b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 13 21:14:28 crc kubenswrapper[4689]: E1013 21:14:28.394934 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-nxrzk" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.722846 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-smc4t" podUID="8df17524-724f-469d-a215-26d230f8d2ce" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.722913 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-hlhzr" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.723222 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-nxrzk" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.812397 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.812997 4689 kuberuntime_manager.go:1274] "Unhandled Error" 
err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-62spr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-bk9hg_openshift-marketplace(d02f5e12-ab55-4649-94db-f248e569c2d1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.814464 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-bk9hg" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.820933 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.821183 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z2h9q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-h2rlr_openshift-marketplace(5d6125f5-c788-40f7-9d1d-9e28239be338): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 13 21:14:29 crc kubenswrapper[4689]: E1013 21:14:29.822768 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-h2rlr" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" Oct 13 21:14:30 crc kubenswrapper[4689]: I1013 21:14:30.126610 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jvx7z" event={"ID":"0ce9ebc5-b00b-4f83-8420-6a58b073efa5","Type":"ContainerStarted","Data":"2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c"} Oct 13 21:14:30 crc kubenswrapper[4689]: I1013 21:14:30.127283 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nffnw"] Oct 13 21:14:30 crc kubenswrapper[4689]: I1013 21:14:30.131477 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nmrjr" event={"ID":"d6cf5abf-3312-44a9-a2e8-4f11711fce76","Type":"ContainerStarted","Data":"0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0"} Oct 13 21:14:30 crc kubenswrapper[4689]: E1013 21:14:30.132011 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-bk9hg" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" Oct 13 21:14:30 crc kubenswrapper[4689]: E1013 21:14:30.132643 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-h2rlr" 
podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" Oct 13 21:14:30 crc kubenswrapper[4689]: W1013 21:14:30.132961 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f5bb2ee_abeb_4342_929a_d61e89f30351.slice/crio-9e5aa5b69125d9ded09539b003f0f4c3c20c05e2e466be4537a39ee6e99c7af7 WatchSource:0}: Error finding container 9e5aa5b69125d9ded09539b003f0f4c3c20c05e2e466be4537a39ee6e99c7af7: Status 404 returned error can't find the container with id 9e5aa5b69125d9ded09539b003f0f4c3c20c05e2e466be4537a39ee6e99c7af7 Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.140390 4689 generic.go:334] "Generic (PLEG): container finished" podID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerID="0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0" exitCode=0 Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.140710 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nmrjr" event={"ID":"d6cf5abf-3312-44a9-a2e8-4f11711fce76","Type":"ContainerDied","Data":"0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0"} Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.149656 4689 generic.go:334] "Generic (PLEG): container finished" podID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerID="2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c" exitCode=0 Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.149724 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jvx7z" event={"ID":"0ce9ebc5-b00b-4f83-8420-6a58b073efa5","Type":"ContainerDied","Data":"2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c"} Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.154183 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nffnw" event={"ID":"3f5bb2ee-abeb-4342-929a-d61e89f30351","Type":"ContainerStarted","Data":"9007ff6f364fe28b8414e36432785f85024d06bc3775444c44bf7332c7bd3b25"} Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.154331 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nffnw" event={"ID":"3f5bb2ee-abeb-4342-929a-d61e89f30351","Type":"ContainerStarted","Data":"81199ae0e976fff4753c3d6aa416f1797be2a247e7d1281d6ff2e7e53ba816ac"} Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.154358 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nffnw" event={"ID":"3f5bb2ee-abeb-4342-929a-d61e89f30351","Type":"ContainerStarted","Data":"9e5aa5b69125d9ded09539b003f0f4c3c20c05e2e466be4537a39ee6e99c7af7"} Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.219881 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-nffnw" podStartSLOduration=166.219861464 podStartE2EDuration="2m46.219861464s" podCreationTimestamp="2025-10-13 21:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:14:31.190956677 +0000 UTC m=+188.109201762" watchObservedRunningTime="2025-10-13 21:14:31.219861464 +0000 UTC m=+188.138106549" Oct 13 21:14:31 crc kubenswrapper[4689]: I1013 21:14:31.929455 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.165207 4689 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jvx7z" event={"ID":"0ce9ebc5-b00b-4f83-8420-6a58b073efa5","Type":"ContainerStarted","Data":"5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b"} Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.168081 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nmrjr" event={"ID":"d6cf5abf-3312-44a9-a2e8-4f11711fce76","Type":"ContainerStarted","Data":"8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04"} Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.198190 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jvx7z" podStartSLOduration=4.158081905 podStartE2EDuration="41.19815771s" podCreationTimestamp="2025-10-13 21:13:51 +0000 UTC" firstStartedPulling="2025-10-13 21:13:54.681488354 +0000 UTC m=+151.599733439" lastFinishedPulling="2025-10-13 21:14:31.721564159 +0000 UTC m=+188.639809244" observedRunningTime="2025-10-13 21:14:32.193109393 +0000 UTC m=+189.111354488" watchObservedRunningTime="2025-10-13 21:14:32.19815771 +0000 UTC m=+189.116402795" Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.220559 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nmrjr" podStartSLOduration=3.300063732 podStartE2EDuration="40.22053564s" podCreationTimestamp="2025-10-13 21:13:52 +0000 UTC" firstStartedPulling="2025-10-13 21:13:54.662022074 +0000 UTC m=+151.580267159" lastFinishedPulling="2025-10-13 21:14:31.582493982 +0000 UTC m=+188.500739067" observedRunningTime="2025-10-13 21:14:32.211618907 +0000 UTC m=+189.129863992" watchObservedRunningTime="2025-10-13 21:14:32.22053564 +0000 UTC m=+189.138780725" Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.386503 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jvx7z" Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.386571 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jvx7z" Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.624816 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nmrjr" Oct 13 21:14:32 crc kubenswrapper[4689]: I1013 21:14:32.624897 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nmrjr" Oct 13 21:14:33 crc kubenswrapper[4689]: I1013 21:14:33.514791 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-jvx7z" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="registry-server" probeResult="failure" output=< Oct 13 21:14:33 crc kubenswrapper[4689]: timeout: failed to connect service ":50051" within 1s Oct 13 21:14:33 crc kubenswrapper[4689]: > Oct 13 21:14:33 crc kubenswrapper[4689]: I1013 21:14:33.671421 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-nmrjr" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="registry-server" probeResult="failure" output=< Oct 13 21:14:33 crc kubenswrapper[4689]: timeout: failed to connect service ":50051" within 1s Oct 13 21:14:33 crc kubenswrapper[4689]: > Oct 13 21:14:42 crc kubenswrapper[4689]: I1013 21:14:42.459581 4689 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/community-operators-jvx7z" Oct 13 21:14:42 crc kubenswrapper[4689]: I1013 21:14:42.509868 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jvx7z" Oct 13 21:14:42 crc kubenswrapper[4689]: I1013 21:14:42.699778 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nmrjr" Oct 13 21:14:42 crc kubenswrapper[4689]: I1013 21:14:42.756343 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nmrjr" Oct 13 21:14:43 crc kubenswrapper[4689]: I1013 21:14:43.233547 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smc4t" event={"ID":"8df17524-724f-469d-a215-26d230f8d2ce","Type":"ContainerStarted","Data":"dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3"} Oct 13 21:14:43 crc kubenswrapper[4689]: I1013 21:14:43.236597 4689 generic.go:334] "Generic (PLEG): container finished" podID="4940641e-0604-4623-80d6-62fbc4187027" containerID="d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527" exitCode=0 Oct 13 21:14:43 crc kubenswrapper[4689]: I1013 21:14:43.236649 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w2hcj" event={"ID":"4940641e-0604-4623-80d6-62fbc4187027","Type":"ContainerDied","Data":"d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527"} Oct 13 21:14:43 crc kubenswrapper[4689]: I1013 21:14:43.241301 4689 generic.go:334] "Generic (PLEG): container finished" podID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerID="c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d" exitCode=0 Oct 13 21:14:43 crc kubenswrapper[4689]: I1013 21:14:43.242144 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nxrzk" event={"ID":"0dabb64b-b7fc-4428-bcfe-10c98dbe797b","Type":"ContainerDied","Data":"c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d"} Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.248884 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w2hcj" event={"ID":"4940641e-0604-4623-80d6-62fbc4187027","Type":"ContainerStarted","Data":"439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f"} Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.250729 4689 generic.go:334] "Generic (PLEG): container finished" podID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerID="59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec" exitCode=0 Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.250794 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk9hg" event={"ID":"d02f5e12-ab55-4649-94db-f248e569c2d1","Type":"ContainerDied","Data":"59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec"} Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.254931 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nxrzk" event={"ID":"0dabb64b-b7fc-4428-bcfe-10c98dbe797b","Type":"ContainerStarted","Data":"58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c"} Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.258521 4689 generic.go:334] "Generic (PLEG): container finished" podID="5d6125f5-c788-40f7-9d1d-9e28239be338" 
containerID="6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97" exitCode=0 Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.258534 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2rlr" event={"ID":"5d6125f5-c788-40f7-9d1d-9e28239be338","Type":"ContainerDied","Data":"6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97"} Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.260573 4689 generic.go:334] "Generic (PLEG): container finished" podID="8df17524-724f-469d-a215-26d230f8d2ce" containerID="dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3" exitCode=0 Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.260630 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smc4t" event={"ID":"8df17524-724f-469d-a215-26d230f8d2ce","Type":"ContainerDied","Data":"dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3"} Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.281185 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w2hcj" podStartSLOduration=2.207717062 podStartE2EDuration="50.281153955s" podCreationTimestamp="2025-10-13 21:13:54 +0000 UTC" firstStartedPulling="2025-10-13 21:13:55.736850749 +0000 UTC m=+152.655095834" lastFinishedPulling="2025-10-13 21:14:43.810287642 +0000 UTC m=+200.728532727" observedRunningTime="2025-10-13 21:14:44.272477996 +0000 UTC m=+201.190723091" watchObservedRunningTime="2025-10-13 21:14:44.281153955 +0000 UTC m=+201.199399050" Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.337683 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nxrzk" podStartSLOduration=2.230435662 podStartE2EDuration="51.337659795s" podCreationTimestamp="2025-10-13 21:13:53 +0000 UTC" firstStartedPulling="2025-10-13 21:13:54.690121909 +0000 UTC m=+151.608366994" lastFinishedPulling="2025-10-13 21:14:43.797346042 +0000 UTC m=+200.715591127" observedRunningTime="2025-10-13 21:14:44.316621582 +0000 UTC m=+201.234866677" watchObservedRunningTime="2025-10-13 21:14:44.337659795 +0000 UTC m=+201.255904880" Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.535349 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.535420 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.705309 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nmrjr"] Oct 13 21:14:44 crc kubenswrapper[4689]: I1013 21:14:44.705542 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nmrjr" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="registry-server" containerID="cri-o://8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04" gracePeriod=2 Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.085131 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nmrjr" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.219410 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-utilities\") pod \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.219726 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-catalog-content\") pod \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.219786 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ml5l\" (UniqueName: \"kubernetes.io/projected/d6cf5abf-3312-44a9-a2e8-4f11711fce76-kube-api-access-5ml5l\") pod \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\" (UID: \"d6cf5abf-3312-44a9-a2e8-4f11711fce76\") " Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.220186 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-utilities" (OuterVolumeSpecName: "utilities") pod "d6cf5abf-3312-44a9-a2e8-4f11711fce76" (UID: "d6cf5abf-3312-44a9-a2e8-4f11711fce76"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.227775 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6cf5abf-3312-44a9-a2e8-4f11711fce76-kube-api-access-5ml5l" (OuterVolumeSpecName: "kube-api-access-5ml5l") pod "d6cf5abf-3312-44a9-a2e8-4f11711fce76" (UID: "d6cf5abf-3312-44a9-a2e8-4f11711fce76"). InnerVolumeSpecName "kube-api-access-5ml5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.270378 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smc4t" event={"ID":"8df17524-724f-469d-a215-26d230f8d2ce","Type":"ContainerStarted","Data":"1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b"} Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.272922 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2rlr" event={"ID":"5d6125f5-c788-40f7-9d1d-9e28239be338","Type":"ContainerStarted","Data":"c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce"} Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.275295 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk9hg" event={"ID":"d02f5e12-ab55-4649-94db-f248e569c2d1","Type":"ContainerStarted","Data":"5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca"} Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.281876 4689 generic.go:334] "Generic (PLEG): container finished" podID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerID="8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04" exitCode=0 Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.282032 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nmrjr" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.282397 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nmrjr" event={"ID":"d6cf5abf-3312-44a9-a2e8-4f11711fce76","Type":"ContainerDied","Data":"8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04"} Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.282442 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nmrjr" event={"ID":"d6cf5abf-3312-44a9-a2e8-4f11711fce76","Type":"ContainerDied","Data":"30b24c642e0072f9a794732b028cad7bf2c06e63f4821b1696ca5215f9662acb"} Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.282461 4689 scope.go:117] "RemoveContainer" containerID="8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.286424 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlhzr" event={"ID":"d3c3a8ce-5574-4dd5-acff-b56b6c188b44","Type":"ContainerStarted","Data":"5d63c29ff2ce5941ac28e04ef8005d635c9ce883863dedc68481682cf2e28489"} Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.289139 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d6cf5abf-3312-44a9-a2e8-4f11711fce76" (UID: "d6cf5abf-3312-44a9-a2e8-4f11711fce76"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.300426 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-smc4t" podStartSLOduration=2.296510316 podStartE2EDuration="50.300405302s" podCreationTimestamp="2025-10-13 21:13:55 +0000 UTC" firstStartedPulling="2025-10-13 21:13:56.771628355 +0000 UTC m=+153.689873440" lastFinishedPulling="2025-10-13 21:14:44.775523341 +0000 UTC m=+201.693768426" observedRunningTime="2025-10-13 21:14:45.292202807 +0000 UTC m=+202.210447892" watchObservedRunningTime="2025-10-13 21:14:45.300405302 +0000 UTC m=+202.218650387" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.303550 4689 scope.go:117] "RemoveContainer" containerID="0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.325635 4689 scope.go:117] "RemoveContainer" containerID="1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.326416 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.326462 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6cf5abf-3312-44a9-a2e8-4f11711fce76-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.326480 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ml5l\" (UniqueName: \"kubernetes.io/projected/d6cf5abf-3312-44a9-a2e8-4f11711fce76-kube-api-access-5ml5l\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.328493 4689 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h2rlr" podStartSLOduration=3.204969188 podStartE2EDuration="53.328480907s" podCreationTimestamp="2025-10-13 21:13:52 +0000 UTC" firstStartedPulling="2025-10-13 21:13:54.665152739 +0000 UTC m=+151.583397824" lastFinishedPulling="2025-10-13 21:14:44.788664458 +0000 UTC m=+201.706909543" observedRunningTime="2025-10-13 21:14:45.326069558 +0000 UTC m=+202.244314653" watchObservedRunningTime="2025-10-13 21:14:45.328480907 +0000 UTC m=+202.246725992" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.340829 4689 scope.go:117] "RemoveContainer" containerID="8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04" Oct 13 21:14:45 crc kubenswrapper[4689]: E1013 21:14:45.341512 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04\": container with ID starting with 8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04 not found: ID does not exist" containerID="8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.341566 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04"} err="failed to get container status \"8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04\": rpc error: code = NotFound desc = could not find container \"8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04\": container with ID starting with 8f7d61f2b8a7aaafcc1a4fc86c819e2b2475a97734506a0e4f8b96ede57a6d04 not found: ID does not exist" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.341654 4689 scope.go:117] "RemoveContainer" containerID="0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0" Oct 13 21:14:45 crc kubenswrapper[4689]: E1013 21:14:45.342139 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0\": container with ID starting with 0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0 not found: ID does not exist" containerID="0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.342193 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0"} err="failed to get container status \"0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0\": rpc error: code = NotFound desc = could not find container \"0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0\": container with ID starting with 0252e6e96d0db7b569891423793b9361d0eb4487de5c78c13b23e2b9485761d0 not found: ID does not exist" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.342220 4689 scope.go:117] "RemoveContainer" containerID="1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af" Oct 13 21:14:45 crc kubenswrapper[4689]: E1013 21:14:45.342484 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af\": container with ID starting with 
1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af not found: ID does not exist" containerID="1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.342511 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af"} err="failed to get container status \"1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af\": rpc error: code = NotFound desc = could not find container \"1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af\": container with ID starting with 1c80ceed9b1e971821a21be9d2702f6ebfc603f842a8396d23d09a21710d61af not found: ID does not exist" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.436535 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.436605 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.593705 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-w2hcj" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="registry-server" probeResult="failure" output=< Oct 13 21:14:45 crc kubenswrapper[4689]: timeout: failed to connect service ":50051" within 1s Oct 13 21:14:45 crc kubenswrapper[4689]: > Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.613018 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bk9hg" podStartSLOduration=4.569519373 podStartE2EDuration="54.612998639s" podCreationTimestamp="2025-10-13 21:13:51 +0000 UTC" firstStartedPulling="2025-10-13 21:13:54.700256575 +0000 UTC m=+151.618501660" lastFinishedPulling="2025-10-13 21:14:44.743735841 +0000 UTC m=+201.661980926" observedRunningTime="2025-10-13 21:14:45.383896525 +0000 UTC m=+202.302141610" watchObservedRunningTime="2025-10-13 21:14:45.612998639 +0000 UTC m=+202.531243724" Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.614239 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nmrjr"] Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.620425 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nmrjr"] Oct 13 21:14:45 crc kubenswrapper[4689]: I1013 21:14:45.874910 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" path="/var/lib/kubelet/pods/d6cf5abf-3312-44a9-a2e8-4f11711fce76/volumes" Oct 13 21:14:46 crc kubenswrapper[4689]: I1013 21:14:46.294766 4689 generic.go:334] "Generic (PLEG): container finished" podID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerID="5d63c29ff2ce5941ac28e04ef8005d635c9ce883863dedc68481682cf2e28489" exitCode=0 Oct 13 21:14:46 crc kubenswrapper[4689]: I1013 21:14:46.294875 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlhzr" event={"ID":"d3c3a8ce-5574-4dd5-acff-b56b6c188b44","Type":"ContainerDied","Data":"5d63c29ff2ce5941ac28e04ef8005d635c9ce883863dedc68481682cf2e28489"} Oct 13 21:14:46 crc kubenswrapper[4689]: I1013 21:14:46.479323 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-smc4t" 
podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="registry-server" probeResult="failure" output=< Oct 13 21:14:46 crc kubenswrapper[4689]: timeout: failed to connect service ":50051" within 1s Oct 13 21:14:46 crc kubenswrapper[4689]: > Oct 13 21:14:51 crc kubenswrapper[4689]: I1013 21:14:51.323522 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlhzr" event={"ID":"d3c3a8ce-5574-4dd5-acff-b56b6c188b44","Type":"ContainerStarted","Data":"0c5e18be86c7990d0e5343b7aa8a96e12c8bc0e36072cde0cb6d808601178528"} Oct 13 21:14:51 crc kubenswrapper[4689]: I1013 21:14:51.350714 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hlhzr" podStartSLOduration=4.196888313 podStartE2EDuration="56.350681006s" podCreationTimestamp="2025-10-13 21:13:55 +0000 UTC" firstStartedPulling="2025-10-13 21:13:57.807322245 +0000 UTC m=+154.725567330" lastFinishedPulling="2025-10-13 21:14:49.961114938 +0000 UTC m=+206.879360023" observedRunningTime="2025-10-13 21:14:51.349301087 +0000 UTC m=+208.267546172" watchObservedRunningTime="2025-10-13 21:14:51.350681006 +0000 UTC m=+208.268926131" Oct 13 21:14:52 crc kubenswrapper[4689]: I1013 21:14:52.522501 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bk9hg" Oct 13 21:14:52 crc kubenswrapper[4689]: I1013 21:14:52.522570 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bk9hg" Oct 13 21:14:52 crc kubenswrapper[4689]: I1013 21:14:52.564258 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bk9hg" Oct 13 21:14:52 crc kubenswrapper[4689]: I1013 21:14:52.703614 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h2rlr" Oct 13 21:14:52 crc kubenswrapper[4689]: I1013 21:14:52.703669 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h2rlr" Oct 13 21:14:52 crc kubenswrapper[4689]: I1013 21:14:52.743460 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h2rlr" Oct 13 21:14:53 crc kubenswrapper[4689]: I1013 21:14:53.377535 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h2rlr" Oct 13 21:14:53 crc kubenswrapper[4689]: I1013 21:14:53.382497 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bk9hg" Oct 13 21:14:53 crc kubenswrapper[4689]: I1013 21:14:53.859346 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:14:53 crc kubenswrapper[4689]: I1013 21:14:53.859432 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:14:53 crc kubenswrapper[4689]: I1013 21:14:53.859959 4689 kubelet.go:2542] 
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:14:53 crc kubenswrapper[4689]: I1013 21:14:53.860554 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:14:53 crc kubenswrapper[4689]: I1013 21:14:53.860648 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340" gracePeriod=600 Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.062768 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.063068 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.117030 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.304768 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h2rlr"] Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.341268 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340" exitCode=0 Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.341344 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340"} Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.341827 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"a2f64fccdbff6f9fbf34a77ede4edac00241e08c4bc51864952122589c103b1a"} Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.396956 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.579293 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:14:54 crc kubenswrapper[4689]: I1013 21:14:54.619747 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.346819 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h2rlr" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="registry-server" containerID="cri-o://c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce" 
gracePeriod=2 Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.488400 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.538900 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.705321 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h2rlr" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.785114 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-utilities\") pod \"5d6125f5-c788-40f7-9d1d-9e28239be338\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.785298 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2h9q\" (UniqueName: \"kubernetes.io/projected/5d6125f5-c788-40f7-9d1d-9e28239be338-kube-api-access-z2h9q\") pod \"5d6125f5-c788-40f7-9d1d-9e28239be338\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.785351 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-catalog-content\") pod \"5d6125f5-c788-40f7-9d1d-9e28239be338\" (UID: \"5d6125f5-c788-40f7-9d1d-9e28239be338\") " Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.786015 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-utilities" (OuterVolumeSpecName: "utilities") pod "5d6125f5-c788-40f7-9d1d-9e28239be338" (UID: "5d6125f5-c788-40f7-9d1d-9e28239be338"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.791827 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d6125f5-c788-40f7-9d1d-9e28239be338-kube-api-access-z2h9q" (OuterVolumeSpecName: "kube-api-access-z2h9q") pod "5d6125f5-c788-40f7-9d1d-9e28239be338" (UID: "5d6125f5-c788-40f7-9d1d-9e28239be338"). InnerVolumeSpecName "kube-api-access-z2h9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.836344 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d6125f5-c788-40f7-9d1d-9e28239be338" (UID: "5d6125f5-c788-40f7-9d1d-9e28239be338"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.887355 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2h9q\" (UniqueName: \"kubernetes.io/projected/5d6125f5-c788-40f7-9d1d-9e28239be338-kube-api-access-z2h9q\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.887713 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.887729 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d6125f5-c788-40f7-9d1d-9e28239be338-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.895643 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.895691 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:14:55 crc kubenswrapper[4689]: I1013 21:14:55.946607 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.354779 4689 generic.go:334] "Generic (PLEG): container finished" podID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerID="c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce" exitCode=0 Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.354873 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h2rlr" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.354891 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2rlr" event={"ID":"5d6125f5-c788-40f7-9d1d-9e28239be338","Type":"ContainerDied","Data":"c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce"} Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.354955 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2rlr" event={"ID":"5d6125f5-c788-40f7-9d1d-9e28239be338","Type":"ContainerDied","Data":"146760d6720c978289c242d011b91beb4f1f8bcf10b6db209b7fb7228b5d790c"} Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.354989 4689 scope.go:117] "RemoveContainer" containerID="c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.376389 4689 scope.go:117] "RemoveContainer" containerID="6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.388824 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h2rlr"] Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.396639 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h2rlr"] Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.404964 4689 scope.go:117] "RemoveContainer" containerID="71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.416810 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hlhzr" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.429495 4689 scope.go:117] "RemoveContainer" containerID="c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce" Oct 13 21:14:56 crc kubenswrapper[4689]: E1013 21:14:56.430382 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce\": container with ID starting with c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce not found: ID does not exist" containerID="c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.430416 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce"} err="failed to get container status \"c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce\": rpc error: code = NotFound desc = could not find container \"c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce\": container with ID starting with c6e4c5df9ef46bc64e9ee824e31188fe9f4c689545a2004328aa9103485dcfce not found: ID does not exist" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.430438 4689 scope.go:117] "RemoveContainer" containerID="6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97" Oct 13 21:14:56 crc kubenswrapper[4689]: E1013 21:14:56.430757 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97\": container with ID starting with 
6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97 not found: ID does not exist" containerID="6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.430784 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97"} err="failed to get container status \"6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97\": rpc error: code = NotFound desc = could not find container \"6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97\": container with ID starting with 6325ba675eef69a52d9e995be30de364986bfa1f1f80cd6f0ed56b4f1538bf97 not found: ID does not exist" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.430798 4689 scope.go:117] "RemoveContainer" containerID="71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6" Oct 13 21:14:56 crc kubenswrapper[4689]: E1013 21:14:56.431080 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6\": container with ID starting with 71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6 not found: ID does not exist" containerID="71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.431105 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6"} err="failed to get container status \"71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6\": rpc error: code = NotFound desc = could not find container \"71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6\": container with ID starting with 71eb5f11e3d4e172d3db159b74a9b2b4986b94ccf2d7601bd4e28f68aff42fc6 not found: ID does not exist" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.507111 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w2hcj"] Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.507406 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w2hcj" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="registry-server" containerID="cri-o://439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f" gracePeriod=2 Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.838081 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.903132 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4z294\" (UniqueName: \"kubernetes.io/projected/4940641e-0604-4623-80d6-62fbc4187027-kube-api-access-4z294\") pod \"4940641e-0604-4623-80d6-62fbc4187027\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.903215 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-utilities\") pod \"4940641e-0604-4623-80d6-62fbc4187027\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.903292 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-catalog-content\") pod \"4940641e-0604-4623-80d6-62fbc4187027\" (UID: \"4940641e-0604-4623-80d6-62fbc4187027\") " Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.904466 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-utilities" (OuterVolumeSpecName: "utilities") pod "4940641e-0604-4623-80d6-62fbc4187027" (UID: "4940641e-0604-4623-80d6-62fbc4187027"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.907935 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4940641e-0604-4623-80d6-62fbc4187027-kube-api-access-4z294" (OuterVolumeSpecName: "kube-api-access-4z294") pod "4940641e-0604-4623-80d6-62fbc4187027" (UID: "4940641e-0604-4623-80d6-62fbc4187027"). InnerVolumeSpecName "kube-api-access-4z294". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:14:56 crc kubenswrapper[4689]: I1013 21:14:56.927174 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4940641e-0604-4623-80d6-62fbc4187027" (UID: "4940641e-0604-4623-80d6-62fbc4187027"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.005387 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.005440 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4z294\" (UniqueName: \"kubernetes.io/projected/4940641e-0604-4623-80d6-62fbc4187027-kube-api-access-4z294\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.005459 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4940641e-0604-4623-80d6-62fbc4187027-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.363262 4689 generic.go:334] "Generic (PLEG): container finished" podID="4940641e-0604-4623-80d6-62fbc4187027" containerID="439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f" exitCode=0 Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.363333 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w2hcj" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.363350 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w2hcj" event={"ID":"4940641e-0604-4623-80d6-62fbc4187027","Type":"ContainerDied","Data":"439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f"} Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.363410 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w2hcj" event={"ID":"4940641e-0604-4623-80d6-62fbc4187027","Type":"ContainerDied","Data":"ea205f01a45c0185a0a5c205ab9802552f8c82a22efe5621c1d7cba54a177da3"} Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.363434 4689 scope.go:117] "RemoveContainer" containerID="439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.377672 4689 scope.go:117] "RemoveContainer" containerID="d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.404229 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w2hcj"] Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.404278 4689 scope.go:117] "RemoveContainer" containerID="600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.408135 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w2hcj"] Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.418139 4689 scope.go:117] "RemoveContainer" containerID="439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f" Oct 13 21:14:57 crc kubenswrapper[4689]: E1013 21:14:57.418549 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f\": container with ID starting with 439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f not found: ID does not exist" containerID="439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.418605 4689 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f"} err="failed to get container status \"439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f\": rpc error: code = NotFound desc = could not find container \"439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f\": container with ID starting with 439f8723a958e5a2b2cc62b7eb170060f0de15e0f9a8095449af55f2f1471a4f not found: ID does not exist" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.418639 4689 scope.go:117] "RemoveContainer" containerID="d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527" Oct 13 21:14:57 crc kubenswrapper[4689]: E1013 21:14:57.419034 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527\": container with ID starting with d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527 not found: ID does not exist" containerID="d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.419064 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527"} err="failed to get container status \"d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527\": rpc error: code = NotFound desc = could not find container \"d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527\": container with ID starting with d8bfface702250460eff6aec97e4491c842b3588eca99afb4161bd444a920527 not found: ID does not exist" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.419107 4689 scope.go:117] "RemoveContainer" containerID="600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4" Oct 13 21:14:57 crc kubenswrapper[4689]: E1013 21:14:57.419394 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4\": container with ID starting with 600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4 not found: ID does not exist" containerID="600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.419436 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4"} err="failed to get container status \"600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4\": rpc error: code = NotFound desc = could not find container \"600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4\": container with ID starting with 600e929a9bdfb214bc041da14517b437c464b83f553784ef5d362aedade7ffb4 not found: ID does not exist" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.877401 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4940641e-0604-4623-80d6-62fbc4187027" path="/var/lib/kubelet/pods/4940641e-0604-4623-80d6-62fbc4187027/volumes" Oct 13 21:14:57 crc kubenswrapper[4689]: I1013 21:14:57.878288 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" path="/var/lib/kubelet/pods/5d6125f5-c788-40f7-9d1d-9e28239be338/volumes" Oct 13 21:14:58 crc kubenswrapper[4689]: I1013 
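[editor's note] The RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" triplets above (also at 21:14:45 and 21:14:56) are benign: the kubelet asks CRI-O for the status of a container the runtime has already removed and gets gRPC NotFound back, so the cleanup is effectively already done. A caller can classify that case from the gRPC status code; a minimal sketch under that assumption, not the kubelet's actual code:

    package main

    import (
    	"errors"
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // alreadyGone reports whether a CRI call failed only because the
    // container no longer exists (gRPC NotFound), which a cleanup path
    // can safely treat as success.
    func alreadyGone(err error) bool {
    	if err == nil {
    		return false
    	}
    	if s, ok := status.FromError(err); ok {
    		return s.Code() == codes.NotFound
    	}
    	return false
    }

    func main() {
    	notFound := status.Error(codes.NotFound, "could not find container")
    	fmt.Println(alreadyGone(notFound))            // true: ignore and move on
    	fmt.Println(alreadyGone(errors.New("other"))) // false: a real failure
    }
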
Oct 13 21:14:58 crc kubenswrapper[4689]: I1013 21:14:58.710012 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hlhzr"]
Oct 13 21:14:58 crc kubenswrapper[4689]: I1013 21:14:58.710235 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hlhzr" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="registry-server" containerID="cri-o://0c5e18be86c7990d0e5343b7aa8a96e12c8bc0e36072cde0cb6d808601178528" gracePeriod=2
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.380605 4689 generic.go:334] "Generic (PLEG): container finished" podID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerID="0c5e18be86c7990d0e5343b7aa8a96e12c8bc0e36072cde0cb6d808601178528" exitCode=0
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.380650 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlhzr" event={"ID":"d3c3a8ce-5574-4dd5-acff-b56b6c188b44","Type":"ContainerDied","Data":"0c5e18be86c7990d0e5343b7aa8a96e12c8bc0e36072cde0cb6d808601178528"}
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.624999 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hlhzr"
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.741050 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mqjk\" (UniqueName: \"kubernetes.io/projected/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-kube-api-access-5mqjk\") pod \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") "
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.741140 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-catalog-content\") pod \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") "
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.741214 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-utilities\") pod \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\" (UID: \"d3c3a8ce-5574-4dd5-acff-b56b6c188b44\") "
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.742164 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-utilities" (OuterVolumeSpecName: "utilities") pod "d3c3a8ce-5574-4dd5-acff-b56b6c188b44" (UID: "d3c3a8ce-5574-4dd5-acff-b56b6c188b44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.746719 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-kube-api-access-5mqjk" (OuterVolumeSpecName: "kube-api-access-5mqjk") pod "d3c3a8ce-5574-4dd5-acff-b56b6c188b44" (UID: "d3c3a8ce-5574-4dd5-acff-b56b6c188b44"). InnerVolumeSpecName "kube-api-access-5mqjk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.830914 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3c3a8ce-5574-4dd5-acff-b56b6c188b44" (UID: "d3c3a8ce-5574-4dd5-acff-b56b6c188b44"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.842247 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mqjk\" (UniqueName: \"kubernetes.io/projected/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-kube-api-access-5mqjk\") on node \"crc\" DevicePath \"\""
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.842286 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 21:14:59 crc kubenswrapper[4689]: I1013 21:14:59.842297 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c3a8ce-5574-4dd5-acff-b56b6c188b44-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150232 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"]
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150745 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150757 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150768 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="915ec69a-0839-4a93-9d1e-d79ae5620ae2" containerName="pruner"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150775 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="915ec69a-0839-4a93-9d1e-d79ae5620ae2" containerName="pruner"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150783 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150789 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150797 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150804 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150813 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150819 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150830 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150837 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150844 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150850 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150857 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150864 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150875 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150923 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150932 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150941 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="extract-content"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150955 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150967 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150978 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.150985 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.150997 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151003 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="extract-utilities"
Oct 13 21:15:00 crc kubenswrapper[4689]: E1013 21:15:00.151015 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d37033-06c5-4468-bfb1-35459bf71b15" containerName="pruner"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151022 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d37033-06c5-4468-bfb1-35459bf71b15" containerName="pruner"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151141 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151158 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d6125f5-c788-40f7-9d1d-9e28239be338" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151169 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="915ec69a-0839-4a93-9d1e-d79ae5620ae2" containerName="pruner"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151183 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6cf5abf-3312-44a9-a2e8-4f11711fce76" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151199 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="4940641e-0604-4623-80d6-62fbc4187027" containerName="registry-server"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151205 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="20d37033-06c5-4468-bfb1-35459bf71b15" containerName="pruner"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.151769 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.155763 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.155893 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.204091 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"]
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.247528 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-config-volume\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.247569 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-secret-volume\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.247642 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55b4c\" (UniqueName: \"kubernetes.io/projected/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-kube-api-access-55b4c\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.349563 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-config-volume\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.349878 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-secret-volume\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.349988 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55b4c\" (UniqueName: \"kubernetes.io/projected/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-kube-api-access-55b4c\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.350620 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-config-volume\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.362280 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-secret-volume\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.367563 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55b4c\" (UniqueName: \"kubernetes.io/projected/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-kube-api-access-55b4c\") pod \"collect-profiles-29339835-96hsj\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.390048 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlhzr" event={"ID":"d3c3a8ce-5574-4dd5-acff-b56b6c188b44","Type":"ContainerDied","Data":"c99637177f768aed123e2403a60a18bfcbcaf900c0ad46bb90a6584f20d78a66"}
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.390115 4689 scope.go:117] "RemoveContainer" containerID="0c5e18be86c7990d0e5343b7aa8a96e12c8bc0e36072cde0cb6d808601178528"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.390133 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hlhzr"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.406364 4689 scope.go:117] "RemoveContainer" containerID="5d63c29ff2ce5941ac28e04ef8005d635c9ce883863dedc68481682cf2e28489"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.418754 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hlhzr"]
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.423450 4689 scope.go:117] "RemoveContainer" containerID="05f40238f2a06a34715c9b261fc3d49a62e8e3ba1b56095705176090fba35c54"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.427044 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hlhzr"]
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.466984 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:00 crc kubenswrapper[4689]: I1013 21:15:00.644893 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"]
Oct 13 21:15:01 crc kubenswrapper[4689]: I1013 21:15:01.398500 4689 generic.go:334] "Generic (PLEG): container finished" podID="8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" containerID="7cb83203217a35475839a6768bd2fbe9681c24d31eea255de13408cbc233ce6a" exitCode=0
Oct 13 21:15:01 crc kubenswrapper[4689]: I1013 21:15:01.398545 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj" event={"ID":"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6","Type":"ContainerDied","Data":"7cb83203217a35475839a6768bd2fbe9681c24d31eea255de13408cbc233ce6a"}
Oct 13 21:15:01 crc kubenswrapper[4689]: I1013 21:15:01.398569 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj" event={"ID":"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6","Type":"ContainerStarted","Data":"dfc6c6e94ed52350f346441bd0d8bebe475fd7c5f500bb0d1140c8c22d783d1f"}
Oct 13 21:15:01 crc kubenswrapper[4689]: I1013 21:15:01.874773 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3c3a8ce-5574-4dd5-acff-b56b6c188b44" path="/var/lib/kubelet/pods/d3c3a8ce-5574-4dd5-acff-b56b6c188b44/volumes"
Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.618383 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"
Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.696282 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-config-volume\") pod \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") "
Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.696707 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55b4c\" (UniqueName: \"kubernetes.io/projected/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-kube-api-access-55b4c\") pod \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") "
Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.696823 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-secret-volume\") pod \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\" (UID: \"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6\") "
Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.697069 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-config-volume" (OuterVolumeSpecName: "config-volume") pod "8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" (UID: "8c4cf8f8-edb9-4a23-832a-bf70c05cdce6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.702922 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-kube-api-access-55b4c" (OuterVolumeSpecName: "kube-api-access-55b4c") pod "8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" (UID: "8c4cf8f8-edb9-4a23-832a-bf70c05cdce6"). InnerVolumeSpecName "kube-api-access-55b4c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.703153 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" (UID: "8c4cf8f8-edb9-4a23-832a-bf70c05cdce6"). InnerVolumeSpecName "secret-volume".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.797963 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55b4c\" (UniqueName: \"kubernetes.io/projected/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-kube-api-access-55b4c\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.798016 4689 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:02 crc kubenswrapper[4689]: I1013 21:15:02.798028 4689 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:03 crc kubenswrapper[4689]: I1013 21:15:03.410844 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj" event={"ID":"8c4cf8f8-edb9-4a23-832a-bf70c05cdce6","Type":"ContainerDied","Data":"dfc6c6e94ed52350f346441bd0d8bebe475fd7c5f500bb0d1140c8c22d783d1f"} Oct 13 21:15:03 crc kubenswrapper[4689]: I1013 21:15:03.410881 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfc6c6e94ed52350f346441bd0d8bebe475fd7c5f500bb0d1140c8c22d783d1f" Oct 13 21:15:03 crc kubenswrapper[4689]: I1013 21:15:03.410939 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj" Oct 13 21:15:04 crc kubenswrapper[4689]: I1013 21:15:04.536757 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7gghw"] Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.566159 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" podUID="9db38791-f017-4d4f-b9e9-08f3ccd38704" containerName="oauth-openshift" containerID="cri-o://0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f" gracePeriod=15 Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.928881 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.966339 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-86d85988f6-lzckz"] Oct 13 21:15:29 crc kubenswrapper[4689]: E1013 21:15:29.966568 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" containerName="collect-profiles" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.966602 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" containerName="collect-profiles" Oct 13 21:15:29 crc kubenswrapper[4689]: E1013 21:15:29.966628 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db38791-f017-4d4f-b9e9-08f3ccd38704" containerName="oauth-openshift" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.966637 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db38791-f017-4d4f-b9e9-08f3ccd38704" containerName="oauth-openshift" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.966757 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db38791-f017-4d4f-b9e9-08f3ccd38704" containerName="oauth-openshift" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.966783 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" containerName="collect-profiles" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.967206 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.985576 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-86d85988f6-lzckz"] Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994256 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-provider-selection\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994346 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-session\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994378 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-router-certs\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994404 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8gm8\" (UniqueName: \"kubernetes.io/projected/9db38791-f017-4d4f-b9e9-08f3ccd38704-kube-api-access-r8gm8\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994422 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-error\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994446 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-serving-cert\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994472 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-policies\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994491 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-ocp-branding-template\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994508 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-service-ca\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994525 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-cliconfig\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994545 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-idp-0-file-data\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994564 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-login\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994581 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-dir\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994620 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-trusted-ca-bundle\") pod \"9db38791-f017-4d4f-b9e9-08f3ccd38704\" (UID: \"9db38791-f017-4d4f-b9e9-08f3ccd38704\") " Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994776 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-router-certs\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994819 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994843 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-error\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994872 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-session\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994889 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994909 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-login\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994927 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994947 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-serving-cert\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994965 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-audit-policies\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.994989 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8687a492-2b77-4d50-9f7d-467e08b44f15-audit-dir\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.995007 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.995029 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-service-ca\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.995056 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk7zw\" (UniqueName: \"kubernetes.io/projected/8687a492-2b77-4d50-9f7d-467e08b44f15-kube-api-access-xk7zw\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.995073 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-cliconfig\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.996711 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.996756 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.997123 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.997173 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:15:29 crc kubenswrapper[4689]: I1013 21:15:29.997573 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.002858 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.003191 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9db38791-f017-4d4f-b9e9-08f3ccd38704-kube-api-access-r8gm8" (OuterVolumeSpecName: "kube-api-access-r8gm8") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "kube-api-access-r8gm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.003235 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.004426 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.019248 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.019811 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.020232 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.020507 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.021368 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "9db38791-f017-4d4f-b9e9-08f3ccd38704" (UID: "9db38791-f017-4d4f-b9e9-08f3ccd38704"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096087 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-session\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096177 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096219 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-login\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096255 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096301 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-audit-policies\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096335 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-serving-cert\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096383 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8687a492-2b77-4d50-9f7d-467e08b44f15-audit-dir\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096420 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " 
pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096461 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-service-ca\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096516 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk7zw\" (UniqueName: \"kubernetes.io/projected/8687a492-2b77-4d50-9f7d-467e08b44f15-kube-api-access-xk7zw\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096556 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-cliconfig\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096627 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-router-certs\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096670 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096810 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-error\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.096839 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8687a492-2b77-4d50-9f7d-467e08b44f15-audit-dir\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097053 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097077 4689 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097094 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8gm8\" (UniqueName: \"kubernetes.io/projected/9db38791-f017-4d4f-b9e9-08f3ccd38704-kube-api-access-r8gm8\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097109 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097124 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097140 4689 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097155 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097171 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097185 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097202 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097217 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097233 4689 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9db38791-f017-4d4f-b9e9-08f3ccd38704-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097247 4689 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.097267 4689 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9db38791-f017-4d4f-b9e9-08f3ccd38704-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.098279 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-service-ca\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.098356 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-audit-policies\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.099246 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.099258 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-cliconfig\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.102922 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.103012 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.103325 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-session\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.103927 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.104668 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-login\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.105311 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-user-template-error\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.106139 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-router-certs\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.106824 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8687a492-2b77-4d50-9f7d-467e08b44f15-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.118491 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk7zw\" (UniqueName: \"kubernetes.io/projected/8687a492-2b77-4d50-9f7d-467e08b44f15-kube-api-access-xk7zw\") pod \"oauth-openshift-86d85988f6-lzckz\" (UID: \"8687a492-2b77-4d50-9f7d-467e08b44f15\") " pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.285882 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.579491 4689 generic.go:334] "Generic (PLEG): container finished" podID="9db38791-f017-4d4f-b9e9-08f3ccd38704" containerID="0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f" exitCode=0 Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.579556 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" event={"ID":"9db38791-f017-4d4f-b9e9-08f3ccd38704","Type":"ContainerDied","Data":"0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f"} Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.579627 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" event={"ID":"9db38791-f017-4d4f-b9e9-08f3ccd38704","Type":"ContainerDied","Data":"6b3a33207043212cfdf02f1cd789d4cd549c5c906369b37461ea82e415f38470"} Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.579658 4689 scope.go:117] "RemoveContainer" containerID="0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.579821 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7gghw" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.580239 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-86d85988f6-lzckz"] Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.604521 4689 scope.go:117] "RemoveContainer" containerID="0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f" Oct 13 21:15:30 crc kubenswrapper[4689]: E1013 21:15:30.605181 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f\": container with ID starting with 0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f not found: ID does not exist" containerID="0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.605228 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f"} err="failed to get container status \"0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f\": rpc error: code = NotFound desc = could not find container \"0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f\": container with ID starting with 0fa25d8e73b20b440184669215b51e26c0eece8e5c65ad45e6dff7cb6233b39f not found: ID does not exist" Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.620412 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7gghw"] Oct 13 21:15:30 crc kubenswrapper[4689]: I1013 21:15:30.622967 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7gghw"] Oct 13 21:15:31 crc kubenswrapper[4689]: I1013 21:15:31.589562 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" event={"ID":"8687a492-2b77-4d50-9f7d-467e08b44f15","Type":"ContainerStarted","Data":"a9a3f5229a1fe306e9cd660987c841e2e2f2774e58104b2a32d3893dfe503ef3"} Oct 13 21:15:31 crc kubenswrapper[4689]: 
I1013 21:15:31.590088 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:31 crc kubenswrapper[4689]: I1013 21:15:31.590116 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" event={"ID":"8687a492-2b77-4d50-9f7d-467e08b44f15","Type":"ContainerStarted","Data":"ceae5f54f19610361f3c84abc051dbc62b74f30da01df7833a9247698f3ba75a"} Oct 13 21:15:31 crc kubenswrapper[4689]: I1013 21:15:31.598872 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" Oct 13 21:15:31 crc kubenswrapper[4689]: I1013 21:15:31.613770 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-86d85988f6-lzckz" podStartSLOduration=27.613748581 podStartE2EDuration="27.613748581s" podCreationTimestamp="2025-10-13 21:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:15:31.612359122 +0000 UTC m=+248.530604237" watchObservedRunningTime="2025-10-13 21:15:31.613748581 +0000 UTC m=+248.531993666" Oct 13 21:15:31 crc kubenswrapper[4689]: I1013 21:15:31.873570 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9db38791-f017-4d4f-b9e9-08f3ccd38704" path="/var/lib/kubelet/pods/9db38791-f017-4d4f-b9e9-08f3ccd38704/volumes" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.211673 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bk9hg"] Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.214029 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bk9hg" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="registry-server" containerID="cri-o://5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca" gracePeriod=30 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.215523 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jvx7z"] Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.215907 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jvx7z" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="registry-server" containerID="cri-o://5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b" gracePeriod=30 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.230876 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mql5t"] Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.231818 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" podUID="d3229a49-3424-464a-b479-460fb0a21620" containerName="marketplace-operator" containerID="cri-o://a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7" gracePeriod=30 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.234861 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nxrzk"] Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.235302 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nxrzk" 
podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="registry-server" containerID="cri-o://58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c" gracePeriod=30 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.250038 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-smc4t"] Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.251085 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-smc4t" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="registry-server" containerID="cri-o://1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b" gracePeriod=30 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.255357 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c55dv"] Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.256388 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.268341 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c55dv"] Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.289923 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.290012 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.290042 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g9td\" (UniqueName: \"kubernetes.io/projected/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-kube-api-access-7g9td\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.391333 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.391823 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g9td\" (UniqueName: \"kubernetes.io/projected/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-kube-api-access-7g9td\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc 
kubenswrapper[4689]: I1013 21:15:49.391885 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.393206 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.405332 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.414208 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g9td\" (UniqueName: \"kubernetes.io/projected/6bdfaec3-47bd-4ca1-98f5-a5af88e0d075-kube-api-access-7g9td\") pod \"marketplace-operator-79b997595-c55dv\" (UID: \"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075\") " pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.658838 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.664263 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bk9hg" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.669358 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jvx7z" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.686101 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.696732 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.723248 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.737388 4689 generic.go:334] "Generic (PLEG): container finished" podID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerID="58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c" exitCode=0 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.737480 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nxrzk" event={"ID":"0dabb64b-b7fc-4428-bcfe-10c98dbe797b","Type":"ContainerDied","Data":"58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.737531 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nxrzk" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.737559 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nxrzk" event={"ID":"0dabb64b-b7fc-4428-bcfe-10c98dbe797b","Type":"ContainerDied","Data":"0aa98261fad351cee85a8aeb3168224d50f2f0427dcc62f18121adbe83715093"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.737608 4689 scope.go:117] "RemoveContainer" containerID="58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.748450 4689 generic.go:334] "Generic (PLEG): container finished" podID="8df17524-724f-469d-a215-26d230f8d2ce" containerID="1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b" exitCode=0 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.748545 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smc4t" event={"ID":"8df17524-724f-469d-a215-26d230f8d2ce","Type":"ContainerDied","Data":"1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.748603 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-smc4t" event={"ID":"8df17524-724f-469d-a215-26d230f8d2ce","Type":"ContainerDied","Data":"cebd5db9fd2258ec62b0c02c47d36b01fd3bc427fbaa8fb4cffec66400ea0c86"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.748717 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-smc4t" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.751522 4689 generic.go:334] "Generic (PLEG): container finished" podID="d3229a49-3424-464a-b479-460fb0a21620" containerID="a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7" exitCode=0 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.751575 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" event={"ID":"d3229a49-3424-464a-b479-460fb0a21620","Type":"ContainerDied","Data":"a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.751612 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" event={"ID":"d3229a49-3424-464a-b479-460fb0a21620","Type":"ContainerDied","Data":"b827631fad039b9ceca3764aa217040de6494aef1ddd76364ad99aa2fc73521a"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.751711 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mql5t" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.755813 4689 generic.go:334] "Generic (PLEG): container finished" podID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerID="5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca" exitCode=0 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.755892 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bk9hg" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.755907 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk9hg" event={"ID":"d02f5e12-ab55-4649-94db-f248e569c2d1","Type":"ContainerDied","Data":"5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.755946 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bk9hg" event={"ID":"d02f5e12-ab55-4649-94db-f248e569c2d1","Type":"ContainerDied","Data":"5b459ae86c7554e28a826b3467d0668cdeda27f7c40831775d24cea5a319af2f"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.759195 4689 generic.go:334] "Generic (PLEG): container finished" podID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerID="5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b" exitCode=0 Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.759269 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jvx7z" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.759261 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jvx7z" event={"ID":"0ce9ebc5-b00b-4f83-8420-6a58b073efa5","Type":"ContainerDied","Data":"5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.759383 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jvx7z" event={"ID":"0ce9ebc5-b00b-4f83-8420-6a58b073efa5","Type":"ContainerDied","Data":"773c4bb0ad1b9d3251f66979be419292d9554020e445154a0c872187fffe3a8f"} Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.786991 4689 scope.go:117] "RemoveContainer" containerID="c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804186 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh74k\" (UniqueName: \"kubernetes.io/projected/8df17524-724f-469d-a215-26d230f8d2ce-kube-api-access-fh74k\") pod \"8df17524-724f-469d-a215-26d230f8d2ce\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") " Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804269 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62spr\" (UniqueName: \"kubernetes.io/projected/d02f5e12-ab55-4649-94db-f248e569c2d1-kube-api-access-62spr\") pod \"d02f5e12-ab55-4649-94db-f248e569c2d1\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") " Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804306 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-catalog-content\") pod \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") " 
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804344 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-utilities\") pod \"d02f5e12-ab55-4649-94db-f248e569c2d1\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804373 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-utilities\") pod \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804438 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-utilities\") pod \"8df17524-724f-469d-a215-26d230f8d2ce\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804464 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sptv4\" (UniqueName: \"kubernetes.io/projected/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-kube-api-access-sptv4\") pod \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804531 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnbhw\" (UniqueName: \"kubernetes.io/projected/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-kube-api-access-qnbhw\") pod \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\" (UID: \"0dabb64b-b7fc-4428-bcfe-10c98dbe797b\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.804556 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-catalog-content\") pod \"d02f5e12-ab55-4649-94db-f248e569c2d1\" (UID: \"d02f5e12-ab55-4649-94db-f248e569c2d1\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.805047 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-catalog-content\") pod \"8df17524-724f-469d-a215-26d230f8d2ce\" (UID: \"8df17524-724f-469d-a215-26d230f8d2ce\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.805093 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-catalog-content\") pod \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.805158 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-utilities\") pod \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\" (UID: \"0ce9ebc5-b00b-4f83-8420-6a58b073efa5\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.805701 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-utilities" (OuterVolumeSpecName: "utilities") pod "0dabb64b-b7fc-4428-bcfe-10c98dbe797b" (UID: "0dabb64b-b7fc-4428-bcfe-10c98dbe797b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.807236 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-utilities" (OuterVolumeSpecName: "utilities") pod "8df17524-724f-469d-a215-26d230f8d2ce" (UID: "8df17524-724f-469d-a215-26d230f8d2ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.808296 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-utilities" (OuterVolumeSpecName: "utilities") pod "0ce9ebc5-b00b-4f83-8420-6a58b073efa5" (UID: "0ce9ebc5-b00b-4f83-8420-6a58b073efa5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.814692 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-kube-api-access-sptv4" (OuterVolumeSpecName: "kube-api-access-sptv4") pod "0ce9ebc5-b00b-4f83-8420-6a58b073efa5" (UID: "0ce9ebc5-b00b-4f83-8420-6a58b073efa5"). InnerVolumeSpecName "kube-api-access-sptv4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.815196 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-kube-api-access-qnbhw" (OuterVolumeSpecName: "kube-api-access-qnbhw") pod "0dabb64b-b7fc-4428-bcfe-10c98dbe797b" (UID: "0dabb64b-b7fc-4428-bcfe-10c98dbe797b"). InnerVolumeSpecName "kube-api-access-qnbhw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.816007 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8df17524-724f-469d-a215-26d230f8d2ce-kube-api-access-fh74k" (OuterVolumeSpecName: "kube-api-access-fh74k") pod "8df17524-724f-469d-a215-26d230f8d2ce" (UID: "8df17524-724f-469d-a215-26d230f8d2ce"). InnerVolumeSpecName "kube-api-access-fh74k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.816094 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-utilities" (OuterVolumeSpecName: "utilities") pod "d02f5e12-ab55-4649-94db-f248e569c2d1" (UID: "d02f5e12-ab55-4649-94db-f248e569c2d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.828317 4689 scope.go:117] "RemoveContainer" containerID="fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.829014 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d02f5e12-ab55-4649-94db-f248e569c2d1-kube-api-access-62spr" (OuterVolumeSpecName: "kube-api-access-62spr") pod "d02f5e12-ab55-4649-94db-f248e569c2d1" (UID: "d02f5e12-ab55-4649-94db-f248e569c2d1"). InnerVolumeSpecName "kube-api-access-62spr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.843059 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0dabb64b-b7fc-4428-bcfe-10c98dbe797b" (UID: "0dabb64b-b7fc-4428-bcfe-10c98dbe797b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.857453 4689 scope.go:117] "RemoveContainer" containerID="58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c"
Oct 13 21:15:49 crc kubenswrapper[4689]: E1013 21:15:49.858169 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c\": container with ID starting with 58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c not found: ID does not exist" containerID="58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.858258 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c"} err="failed to get container status \"58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c\": rpc error: code = NotFound desc = could not find container \"58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c\": container with ID starting with 58e5d6a59c834a39ec3749455c13e2bdd6abed29782f6c674cdbf7c8560ff59c not found: ID does not exist"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.858296 4689 scope.go:117] "RemoveContainer" containerID="c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d"
Oct 13 21:15:49 crc kubenswrapper[4689]: E1013 21:15:49.859658 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d\": container with ID starting with c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d not found: ID does not exist" containerID="c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.859688 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d"} err="failed to get container status \"c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d\": rpc error: code = NotFound desc = could not find container \"c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d\": container with ID starting with c9131b80a4421886b395b3c11218eeb513256ffb42ad4252057c3e9f1e45ef0d not found: ID does not exist"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.859711 4689 scope.go:117] "RemoveContainer" containerID="fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594"
Oct 13 21:15:49 crc kubenswrapper[4689]: E1013 21:15:49.860157 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594\": container with ID starting with fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594 not found: ID does not exist" containerID="fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.860191 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594"} err="failed to get container status \"fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594\": rpc error: code = NotFound desc = could not find container \"fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594\": container with ID starting with fe02a009ceab8edba50acf2decc0f02ac800cf102dc3290147bb1bd1ff7ae594 not found: ID does not exist"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.860215 4689 scope.go:117] "RemoveContainer" containerID="1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.885098 4689 scope.go:117] "RemoveContainer" containerID="dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.906395 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d3229a49-3424-464a-b479-460fb0a21620-marketplace-operator-metrics\") pod \"d3229a49-3424-464a-b479-460fb0a21620\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.906533 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8tzj\" (UniqueName: \"kubernetes.io/projected/d3229a49-3424-464a-b479-460fb0a21620-kube-api-access-h8tzj\") pod \"d3229a49-3424-464a-b479-460fb0a21620\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.906637 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3229a49-3424-464a-b479-460fb0a21620-marketplace-trusted-ca\") pod \"d3229a49-3424-464a-b479-460fb0a21620\" (UID: \"d3229a49-3424-464a-b479-460fb0a21620\") "
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.906993 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh74k\" (UniqueName: \"kubernetes.io/projected/8df17524-724f-469d-a215-26d230f8d2ce-kube-api-access-fh74k\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907015 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62spr\" (UniqueName: \"kubernetes.io/projected/d02f5e12-ab55-4649-94db-f248e569c2d1-kube-api-access-62spr\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907029 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907040 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907053 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907063 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907078 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sptv4\" (UniqueName: \"kubernetes.io/projected/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-kube-api-access-sptv4\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907090 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnbhw\" (UniqueName: \"kubernetes.io/projected/0dabb64b-b7fc-4428-bcfe-10c98dbe797b-kube-api-access-qnbhw\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907104 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.907959 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3229a49-3424-464a-b479-460fb0a21620-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "d3229a49-3424-464a-b479-460fb0a21620" (UID: "d3229a49-3424-464a-b479-460fb0a21620"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.908973 4689 scope.go:117] "RemoveContainer" containerID="a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36"
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.913775 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3229a49-3424-464a-b479-460fb0a21620-kube-api-access-h8tzj" (OuterVolumeSpecName: "kube-api-access-h8tzj") pod "d3229a49-3424-464a-b479-460fb0a21620" (UID: "d3229a49-3424-464a-b479-460fb0a21620"). InnerVolumeSpecName "kube-api-access-h8tzj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.913833 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3229a49-3424-464a-b479-460fb0a21620-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "d3229a49-3424-464a-b479-460fb0a21620" (UID: "d3229a49-3424-464a-b479-460fb0a21620"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.914074 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d02f5e12-ab55-4649-94db-f248e569c2d1" (UID: "d02f5e12-ab55-4649-94db-f248e569c2d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.917100 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ce9ebc5-b00b-4f83-8420-6a58b073efa5" (UID: "0ce9ebc5-b00b-4f83-8420-6a58b073efa5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.929625 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8df17524-724f-469d-a215-26d230f8d2ce" (UID: "8df17524-724f-469d-a215-26d230f8d2ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.952439 4689 scope.go:117] "RemoveContainer" containerID="1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b" Oct 13 21:15:49 crc kubenswrapper[4689]: E1013 21:15:49.952936 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b\": container with ID starting with 1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b not found: ID does not exist" containerID="1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.952970 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b"} err="failed to get container status \"1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b\": rpc error: code = NotFound desc = could not find container \"1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b\": container with ID starting with 1c14d97dced57abde3daa3491f29ced6a657f87ab477158e6ec090bea280e03b not found: ID does not exist" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.953000 4689 scope.go:117] "RemoveContainer" containerID="dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3" Oct 13 21:15:49 crc kubenswrapper[4689]: E1013 21:15:49.953350 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3\": container with ID starting with dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3 not found: ID does not exist" containerID="dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.953372 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3"} err="failed to get container status \"dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3\": rpc error: code = NotFound desc = could not find container \"dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3\": container with ID starting with dcd0c1d9433caf9d88bd254e9e76f3ca8eea5f4eee4330077c68aef4380ad9f3 not found: ID does not exist" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.953389 4689 scope.go:117] "RemoveContainer" containerID="a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36" Oct 13 21:15:49 crc kubenswrapper[4689]: E1013 21:15:49.953934 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36\": container with ID starting with a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36 not found: ID does not exist" 
containerID="a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.953955 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36"} err="failed to get container status \"a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36\": rpc error: code = NotFound desc = could not find container \"a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36\": container with ID starting with a60168f61fb8773f94ce69ea551a833497b0ca72f98f9900eb59090accfbee36 not found: ID does not exist" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.953972 4689 scope.go:117] "RemoveContainer" containerID="a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.968662 4689 scope.go:117] "RemoveContainer" containerID="a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7" Oct 13 21:15:49 crc kubenswrapper[4689]: E1013 21:15:49.970091 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7\": container with ID starting with a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7 not found: ID does not exist" containerID="a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.970116 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7"} err="failed to get container status \"a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7\": rpc error: code = NotFound desc = could not find container \"a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7\": container with ID starting with a3d0910cde11fdad182160966055192ffb84434562bb849c66e8b730ecc41cd7 not found: ID does not exist" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.970166 4689 scope.go:117] "RemoveContainer" containerID="5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.984670 4689 scope.go:117] "RemoveContainer" containerID="59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec" Oct 13 21:15:49 crc kubenswrapper[4689]: I1013 21:15:49.998911 4689 scope.go:117] "RemoveContainer" containerID="c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.008651 4689 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3229a49-3424-464a-b479-460fb0a21620-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.008677 4689 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d3229a49-3424-464a-b479-460fb0a21620-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.008689 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d02f5e12-ab55-4649-94db-f248e569c2d1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.008701 4689 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8df17524-724f-469d-a215-26d230f8d2ce-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.008711 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ce9ebc5-b00b-4f83-8420-6a58b073efa5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.008721 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8tzj\" (UniqueName: \"kubernetes.io/projected/d3229a49-3424-464a-b479-460fb0a21620-kube-api-access-h8tzj\") on node \"crc\" DevicePath \"\"" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.011735 4689 scope.go:117] "RemoveContainer" containerID="5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca" Oct 13 21:15:50 crc kubenswrapper[4689]: E1013 21:15:50.012299 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca\": container with ID starting with 5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca not found: ID does not exist" containerID="5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.012366 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca"} err="failed to get container status \"5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca\": rpc error: code = NotFound desc = could not find container \"5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca\": container with ID starting with 5b89e2aa3f93fd8f1cd63b43e50c7f27841f4c4772a3de8f60a2509c88358aca not found: ID does not exist" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.012426 4689 scope.go:117] "RemoveContainer" containerID="59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec" Oct 13 21:15:50 crc kubenswrapper[4689]: E1013 21:15:50.012823 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec\": container with ID starting with 59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec not found: ID does not exist" containerID="59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.012852 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec"} err="failed to get container status \"59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec\": rpc error: code = NotFound desc = could not find container \"59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec\": container with ID starting with 59146a5dd362bc2c9a6c62dd2cee635d0924f5f0585d065a8af1617dfa2f72ec not found: ID does not exist" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.012877 4689 scope.go:117] "RemoveContainer" containerID="c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314" Oct 13 21:15:50 crc kubenswrapper[4689]: E1013 21:15:50.013147 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314\": container with ID starting with c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314 not found: ID does not exist" containerID="c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.013186 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314"} err="failed to get container status \"c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314\": rpc error: code = NotFound desc = could not find container \"c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314\": container with ID starting with c37a8e9d4dd76c2d4bc8f4f35ef47b8899c7c8e35b6c090c41320f30b4947314 not found: ID does not exist" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.013214 4689 scope.go:117] "RemoveContainer" containerID="5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.038163 4689 scope.go:117] "RemoveContainer" containerID="2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.056913 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nxrzk"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.059804 4689 scope.go:117] "RemoveContainer" containerID="b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.061386 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nxrzk"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.075264 4689 scope.go:117] "RemoveContainer" containerID="5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b" Oct 13 21:15:50 crc kubenswrapper[4689]: E1013 21:15:50.075930 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b\": container with ID starting with 5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b not found: ID does not exist" containerID="5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.076017 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b"} err="failed to get container status \"5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b\": rpc error: code = NotFound desc = could not find container \"5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b\": container with ID starting with 5c1942f4a915ee2044d93c645c9b9464113ee8d701d19bd9bd297b2ae8bbd91b not found: ID does not exist" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.076090 4689 scope.go:117] "RemoveContainer" containerID="2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c" Oct 13 21:15:50 crc kubenswrapper[4689]: E1013 21:15:50.077324 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c\": container with ID starting with 
2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c not found: ID does not exist" containerID="2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.077366 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c"} err="failed to get container status \"2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c\": rpc error: code = NotFound desc = could not find container \"2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c\": container with ID starting with 2d2158ee76c0ec687aaf144ab9391f12515504570cbc7ff819b71ab6d5fa9e1c not found: ID does not exist" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.077414 4689 scope.go:117] "RemoveContainer" containerID="b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae" Oct 13 21:15:50 crc kubenswrapper[4689]: E1013 21:15:50.077935 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae\": container with ID starting with b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae not found: ID does not exist" containerID="b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.078011 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae"} err="failed to get container status \"b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae\": rpc error: code = NotFound desc = could not find container \"b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae\": container with ID starting with b148be00cde7d47e0a98ee3f594696f512f3ebcba26a60060d6e17eaf36d6eae not found: ID does not exist" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.094015 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-smc4t"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.107230 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-smc4t"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.122183 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mql5t"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.126836 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mql5t"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.129475 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jvx7z"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.138640 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jvx7z"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.149714 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bk9hg"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.153626 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bk9hg"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.165362 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-c55dv"] Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.771693 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" event={"ID":"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075","Type":"ContainerStarted","Data":"4bde2fb83693a270d347328efef981bf206de54e40ae1943592011d4b4fe3b62"} Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.772051 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.772072 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" event={"ID":"6bdfaec3-47bd-4ca1-98f5-a5af88e0d075","Type":"ContainerStarted","Data":"c0b5acb3e7344a8a1ca394ff8108eb54b1eae8b1815925951b2bd030edbe28fc"} Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.783733 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" Oct 13 21:15:50 crc kubenswrapper[4689]: I1013 21:15:50.793043 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-c55dv" podStartSLOduration=1.7930221560000001 podStartE2EDuration="1.793022156s" podCreationTimestamp="2025-10-13 21:15:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:15:50.788623513 +0000 UTC m=+267.706868598" watchObservedRunningTime="2025-10-13 21:15:50.793022156 +0000 UTC m=+267.711267241" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.224440 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5bqtl"] Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225329 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="extract-content" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225349 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="extract-content" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225367 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225377 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225390 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225400 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225414 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225422 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 
21:15:51.225439 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225447 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225457 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="extract-content" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225464 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="extract-content" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225474 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225483 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225501 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225509 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225519 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225527 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225538 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3229a49-3424-464a-b479-460fb0a21620" containerName="marketplace-operator" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225546 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3229a49-3424-464a-b479-460fb0a21620" containerName="marketplace-operator" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225563 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225572 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="extract-utilities" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225600 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="extract-content" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225608 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="extract-content" Oct 13 21:15:51 crc kubenswrapper[4689]: E1013 21:15:51.225621 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="extract-content" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225630 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="extract-content" Oct 13 
21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225762 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225779 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3229a49-3424-464a-b479-460fb0a21620" containerName="marketplace-operator" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225794 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8df17524-724f-469d-a215-26d230f8d2ce" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225806 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.225817 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" containerName="registry-server" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.227023 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.229453 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.247901 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5bqtl"] Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.331853 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgz9r\" (UniqueName: \"kubernetes.io/projected/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-kube-api-access-pgz9r\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.332022 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-catalog-content\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.332166 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-utilities\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.433901 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-utilities\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.434361 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgz9r\" (UniqueName: \"kubernetes.io/projected/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-kube-api-access-pgz9r\") pod \"certified-operators-5bqtl\" (UID: 
\"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.434482 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-catalog-content\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.434974 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-catalog-content\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.434982 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-utilities\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.466083 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgz9r\" (UniqueName: \"kubernetes.io/projected/7f15d479-e732-4ffb-bb8a-c51ce29ffb85-kube-api-access-pgz9r\") pod \"certified-operators-5bqtl\" (UID: \"7f15d479-e732-4ffb-bb8a-c51ce29ffb85\") " pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.540423 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.812122 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5bqtl"] Oct 13 21:15:51 crc kubenswrapper[4689]: W1013 21:15:51.837324 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f15d479_e732_4ffb_bb8a_c51ce29ffb85.slice/crio-b6604f90b6c726c3d4987867b0b8e447e694b3db89f994d27c9c3e2a4a6caee3 WatchSource:0}: Error finding container b6604f90b6c726c3d4987867b0b8e447e694b3db89f994d27c9c3e2a4a6caee3: Status 404 returned error can't find the container with id b6604f90b6c726c3d4987867b0b8e447e694b3db89f994d27c9c3e2a4a6caee3 Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.857485 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6br62"] Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.860221 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.864579 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.865038 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6br62"] Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.896461 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ce9ebc5-b00b-4f83-8420-6a58b073efa5" path="/var/lib/kubelet/pods/0ce9ebc5-b00b-4f83-8420-6a58b073efa5/volumes" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.897966 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dabb64b-b7fc-4428-bcfe-10c98dbe797b" path="/var/lib/kubelet/pods/0dabb64b-b7fc-4428-bcfe-10c98dbe797b/volumes" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.899044 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8df17524-724f-469d-a215-26d230f8d2ce" path="/var/lib/kubelet/pods/8df17524-724f-469d-a215-26d230f8d2ce/volumes" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.900784 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d02f5e12-ab55-4649-94db-f248e569c2d1" path="/var/lib/kubelet/pods/d02f5e12-ab55-4649-94db-f248e569c2d1/volumes" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.901622 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3229a49-3424-464a-b479-460fb0a21620" path="/var/lib/kubelet/pods/d3229a49-3424-464a-b479-460fb0a21620/volumes" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.953975 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b9207c1-e871-4ab7-b030-03664c9e6af4-utilities\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.954079 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b9207c1-e871-4ab7-b030-03664c9e6af4-catalog-content\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:51 crc kubenswrapper[4689]: I1013 21:15:51.954108 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whbws\" (UniqueName: \"kubernetes.io/projected/1b9207c1-e871-4ab7-b030-03664c9e6af4-kube-api-access-whbws\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.055417 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b9207c1-e871-4ab7-b030-03664c9e6af4-catalog-content\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.056750 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whbws\" (UniqueName: 
\"kubernetes.io/projected/1b9207c1-e871-4ab7-b030-03664c9e6af4-kube-api-access-whbws\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.057159 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b9207c1-e871-4ab7-b030-03664c9e6af4-utilities\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.056123 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b9207c1-e871-4ab7-b030-03664c9e6af4-catalog-content\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.057433 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b9207c1-e871-4ab7-b030-03664c9e6af4-utilities\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.076733 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whbws\" (UniqueName: \"kubernetes.io/projected/1b9207c1-e871-4ab7-b030-03664c9e6af4-kube-api-access-whbws\") pod \"redhat-marketplace-6br62\" (UID: \"1b9207c1-e871-4ab7-b030-03664c9e6af4\") " pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.218108 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.418748 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6br62"] Oct 13 21:15:52 crc kubenswrapper[4689]: W1013 21:15:52.428808 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b9207c1_e871_4ab7_b030_03664c9e6af4.slice/crio-310f3a07b70af3487ef80e546832b2dfc4a6e3e0a9953ef4ed7788cbab68ceef WatchSource:0}: Error finding container 310f3a07b70af3487ef80e546832b2dfc4a6e3e0a9953ef4ed7788cbab68ceef: Status 404 returned error can't find the container with id 310f3a07b70af3487ef80e546832b2dfc4a6e3e0a9953ef4ed7788cbab68ceef Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.787839 4689 generic.go:334] "Generic (PLEG): container finished" podID="7f15d479-e732-4ffb-bb8a-c51ce29ffb85" containerID="08359442430dd55db1f3134fa2b5543f3915f9fabeecbba4458d6f469b613885" exitCode=0 Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.787993 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5bqtl" event={"ID":"7f15d479-e732-4ffb-bb8a-c51ce29ffb85","Type":"ContainerDied","Data":"08359442430dd55db1f3134fa2b5543f3915f9fabeecbba4458d6f469b613885"} Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.788294 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5bqtl" event={"ID":"7f15d479-e732-4ffb-bb8a-c51ce29ffb85","Type":"ContainerStarted","Data":"b6604f90b6c726c3d4987867b0b8e447e694b3db89f994d27c9c3e2a4a6caee3"} Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.791884 4689 generic.go:334] "Generic (PLEG): container finished" podID="1b9207c1-e871-4ab7-b030-03664c9e6af4" containerID="7aa6f480679a479529b10e45540e9d6121110271e497c9226945940d0565e17f" exitCode=0 Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.791950 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6br62" event={"ID":"1b9207c1-e871-4ab7-b030-03664c9e6af4","Type":"ContainerDied","Data":"7aa6f480679a479529b10e45540e9d6121110271e497c9226945940d0565e17f"} Oct 13 21:15:52 crc kubenswrapper[4689]: I1013 21:15:52.792030 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6br62" event={"ID":"1b9207c1-e871-4ab7-b030-03664c9e6af4","Type":"ContainerStarted","Data":"310f3a07b70af3487ef80e546832b2dfc4a6e3e0a9953ef4ed7788cbab68ceef"} Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.624303 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mfrg8"] Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.625227 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.632012 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.647054 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mfrg8"] Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.679275 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1307a0a-b3fe-421a-bcdb-b390cb59638d-catalog-content\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.679334 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txwvh\" (UniqueName: \"kubernetes.io/projected/d1307a0a-b3fe-421a-bcdb-b390cb59638d-kube-api-access-txwvh\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.679382 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1307a0a-b3fe-421a-bcdb-b390cb59638d-utilities\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.780459 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1307a0a-b3fe-421a-bcdb-b390cb59638d-utilities\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.780527 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1307a0a-b3fe-421a-bcdb-b390cb59638d-catalog-content\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.780560 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txwvh\" (UniqueName: \"kubernetes.io/projected/d1307a0a-b3fe-421a-bcdb-b390cb59638d-kube-api-access-txwvh\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.781122 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1307a0a-b3fe-421a-bcdb-b390cb59638d-catalog-content\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.782403 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1307a0a-b3fe-421a-bcdb-b390cb59638d-utilities\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " 
pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.799532 4689 generic.go:334] "Generic (PLEG): container finished" podID="1b9207c1-e871-4ab7-b030-03664c9e6af4" containerID="6300ca1b7024fd2da7a3e5d20c8fea04228f20122bc9972a16a2b943632ff86e" exitCode=0 Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.799626 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6br62" event={"ID":"1b9207c1-e871-4ab7-b030-03664c9e6af4","Type":"ContainerDied","Data":"6300ca1b7024fd2da7a3e5d20c8fea04228f20122bc9972a16a2b943632ff86e"} Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.802329 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5bqtl" event={"ID":"7f15d479-e732-4ffb-bb8a-c51ce29ffb85","Type":"ContainerStarted","Data":"7bda2cfb3eb6a99c31aeab2d81a52b9f8232ba8d28043b9f9b37c267176b87f8"} Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.814763 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txwvh\" (UniqueName: \"kubernetes.io/projected/d1307a0a-b3fe-421a-bcdb-b390cb59638d-kube-api-access-txwvh\") pod \"redhat-operators-mfrg8\" (UID: \"d1307a0a-b3fe-421a-bcdb-b390cb59638d\") " pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:53 crc kubenswrapper[4689]: I1013 21:15:53.967634 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.223548 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5rfc2"] Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.226074 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.228143 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.237265 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5rfc2"] Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.287427 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22bd82e7-c0ff-418c-8aec-3d373e40bac3-catalog-content\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.287674 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22bd82e7-c0ff-418c-8aec-3d373e40bac3-utilities\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.287760 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx25g\" (UniqueName: \"kubernetes.io/projected/22bd82e7-c0ff-418c-8aec-3d373e40bac3-kube-api-access-cx25g\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.376296 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mfrg8"] Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.389243 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22bd82e7-c0ff-418c-8aec-3d373e40bac3-catalog-content\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.389329 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22bd82e7-c0ff-418c-8aec-3d373e40bac3-utilities\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.389365 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx25g\" (UniqueName: \"kubernetes.io/projected/22bd82e7-c0ff-418c-8aec-3d373e40bac3-kube-api-access-cx25g\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.389912 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22bd82e7-c0ff-418c-8aec-3d373e40bac3-utilities\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.390228 4689 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22bd82e7-c0ff-418c-8aec-3d373e40bac3-catalog-content\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.410156 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx25g\" (UniqueName: \"kubernetes.io/projected/22bd82e7-c0ff-418c-8aec-3d373e40bac3-kube-api-access-cx25g\") pod \"community-operators-5rfc2\" (UID: \"22bd82e7-c0ff-418c-8aec-3d373e40bac3\") " pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.541599 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.734530 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5rfc2"] Oct 13 21:15:54 crc kubenswrapper[4689]: W1013 21:15:54.748996 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22bd82e7_c0ff_418c_8aec_3d373e40bac3.slice/crio-3a8fce4dadbb855612a2b9817034e8ed91c5d9db9ce666808f1d205d4ce979df WatchSource:0}: Error finding container 3a8fce4dadbb855612a2b9817034e8ed91c5d9db9ce666808f1d205d4ce979df: Status 404 returned error can't find the container with id 3a8fce4dadbb855612a2b9817034e8ed91c5d9db9ce666808f1d205d4ce979df Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.818963 4689 generic.go:334] "Generic (PLEG): container finished" podID="7f15d479-e732-4ffb-bb8a-c51ce29ffb85" containerID="7bda2cfb3eb6a99c31aeab2d81a52b9f8232ba8d28043b9f9b37c267176b87f8" exitCode=0 Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.819078 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5bqtl" event={"ID":"7f15d479-e732-4ffb-bb8a-c51ce29ffb85","Type":"ContainerDied","Data":"7bda2cfb3eb6a99c31aeab2d81a52b9f8232ba8d28043b9f9b37c267176b87f8"} Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.821532 4689 generic.go:334] "Generic (PLEG): container finished" podID="d1307a0a-b3fe-421a-bcdb-b390cb59638d" containerID="3b4b8213ca6bc331a15dfffa9c11cc5f2d276f345554f5c4b6011898764b4561" exitCode=0 Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.821656 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mfrg8" event={"ID":"d1307a0a-b3fe-421a-bcdb-b390cb59638d","Type":"ContainerDied","Data":"3b4b8213ca6bc331a15dfffa9c11cc5f2d276f345554f5c4b6011898764b4561"} Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.821732 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mfrg8" event={"ID":"d1307a0a-b3fe-421a-bcdb-b390cb59638d","Type":"ContainerStarted","Data":"9f7848d920b44e00bdd8abd711904762c9785d0242bfb2902cbca3d43162c842"} Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.827249 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6br62" event={"ID":"1b9207c1-e871-4ab7-b030-03664c9e6af4","Type":"ContainerStarted","Data":"7f10b7fff25801a6802e5aa24452f14a89a2516573c3ff9b4eeaceee9cd12314"} Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.829733 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rfc2" 
event={"ID":"22bd82e7-c0ff-418c-8aec-3d373e40bac3","Type":"ContainerStarted","Data":"3a8fce4dadbb855612a2b9817034e8ed91c5d9db9ce666808f1d205d4ce979df"} Oct 13 21:15:54 crc kubenswrapper[4689]: I1013 21:15:54.890484 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6br62" podStartSLOduration=2.3609732709999998 podStartE2EDuration="3.890456671s" podCreationTimestamp="2025-10-13 21:15:51 +0000 UTC" firstStartedPulling="2025-10-13 21:15:52.795129109 +0000 UTC m=+269.713374194" lastFinishedPulling="2025-10-13 21:15:54.324612509 +0000 UTC m=+271.242857594" observedRunningTime="2025-10-13 21:15:54.889090455 +0000 UTC m=+271.807335540" watchObservedRunningTime="2025-10-13 21:15:54.890456671 +0000 UTC m=+271.808701756" Oct 13 21:15:55 crc kubenswrapper[4689]: I1013 21:15:55.835547 4689 generic.go:334] "Generic (PLEG): container finished" podID="22bd82e7-c0ff-418c-8aec-3d373e40bac3" containerID="db497b93d137b2492c7ee11ca67b29b78a1a7c56f79822748fed9c51e94c80d1" exitCode=0 Oct 13 21:15:55 crc kubenswrapper[4689]: I1013 21:15:55.835703 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rfc2" event={"ID":"22bd82e7-c0ff-418c-8aec-3d373e40bac3","Type":"ContainerDied","Data":"db497b93d137b2492c7ee11ca67b29b78a1a7c56f79822748fed9c51e94c80d1"} Oct 13 21:15:55 crc kubenswrapper[4689]: I1013 21:15:55.838503 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5bqtl" event={"ID":"7f15d479-e732-4ffb-bb8a-c51ce29ffb85","Type":"ContainerStarted","Data":"0a56692e426b80758b9025af02aa0023ca3f631c1013f79ccbfaeae5792382d1"} Oct 13 21:15:55 crc kubenswrapper[4689]: I1013 21:15:55.842003 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mfrg8" event={"ID":"d1307a0a-b3fe-421a-bcdb-b390cb59638d","Type":"ContainerStarted","Data":"e18159f8ce4433fc7d2b32f162f8662388abc7a3550017edfd7d41b81058ca57"} Oct 13 21:15:55 crc kubenswrapper[4689]: I1013 21:15:55.888433 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5bqtl" podStartSLOduration=2.268599684 podStartE2EDuration="4.888413363s" podCreationTimestamp="2025-10-13 21:15:51 +0000 UTC" firstStartedPulling="2025-10-13 21:15:52.790320295 +0000 UTC m=+269.708565380" lastFinishedPulling="2025-10-13 21:15:55.410133974 +0000 UTC m=+272.328379059" observedRunningTime="2025-10-13 21:15:55.870354028 +0000 UTC m=+272.788599123" watchObservedRunningTime="2025-10-13 21:15:55.888413363 +0000 UTC m=+272.806658448" Oct 13 21:15:56 crc kubenswrapper[4689]: I1013 21:15:56.851945 4689 generic.go:334] "Generic (PLEG): container finished" podID="d1307a0a-b3fe-421a-bcdb-b390cb59638d" containerID="e18159f8ce4433fc7d2b32f162f8662388abc7a3550017edfd7d41b81058ca57" exitCode=0 Oct 13 21:15:56 crc kubenswrapper[4689]: I1013 21:15:56.852061 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mfrg8" event={"ID":"d1307a0a-b3fe-421a-bcdb-b390cb59638d","Type":"ContainerDied","Data":"e18159f8ce4433fc7d2b32f162f8662388abc7a3550017edfd7d41b81058ca57"} Oct 13 21:15:57 crc kubenswrapper[4689]: I1013 21:15:57.861835 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rfc2" event={"ID":"22bd82e7-c0ff-418c-8aec-3d373e40bac3","Type":"ContainerStarted","Data":"29d3f823727851ebe62d28aad3a3fc0788b471f6b76001bc0a732540a0ecf928"} Oct 13 
21:15:57 crc kubenswrapper[4689]: I1013 21:15:57.865505 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mfrg8" event={"ID":"d1307a0a-b3fe-421a-bcdb-b390cb59638d","Type":"ContainerStarted","Data":"ace947f176db099fd6e5d4a3820091883cf77bbbf6e455d764789d0395d717d9"} Oct 13 21:15:57 crc kubenswrapper[4689]: I1013 21:15:57.899518 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mfrg8" podStartSLOduration=2.398486724 podStartE2EDuration="4.899492966s" podCreationTimestamp="2025-10-13 21:15:53 +0000 UTC" firstStartedPulling="2025-10-13 21:15:54.822847891 +0000 UTC m=+271.741092976" lastFinishedPulling="2025-10-13 21:15:57.323854123 +0000 UTC m=+274.242099218" observedRunningTime="2025-10-13 21:15:57.899354983 +0000 UTC m=+274.817600068" watchObservedRunningTime="2025-10-13 21:15:57.899492966 +0000 UTC m=+274.817738071" Oct 13 21:15:58 crc kubenswrapper[4689]: I1013 21:15:58.874367 4689 generic.go:334] "Generic (PLEG): container finished" podID="22bd82e7-c0ff-418c-8aec-3d373e40bac3" containerID="29d3f823727851ebe62d28aad3a3fc0788b471f6b76001bc0a732540a0ecf928" exitCode=0 Oct 13 21:15:58 crc kubenswrapper[4689]: I1013 21:15:58.874477 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rfc2" event={"ID":"22bd82e7-c0ff-418c-8aec-3d373e40bac3","Type":"ContainerDied","Data":"29d3f823727851ebe62d28aad3a3fc0788b471f6b76001bc0a732540a0ecf928"} Oct 13 21:15:59 crc kubenswrapper[4689]: I1013 21:15:59.882825 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5rfc2" event={"ID":"22bd82e7-c0ff-418c-8aec-3d373e40bac3","Type":"ContainerStarted","Data":"810dcbcc875acffcc19d82c34ab7074f585889dac5d10b6fda4a721197eb6376"} Oct 13 21:15:59 crc kubenswrapper[4689]: I1013 21:15:59.901392 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5rfc2" podStartSLOduration=2.4035283290000002 podStartE2EDuration="5.901373123s" podCreationTimestamp="2025-10-13 21:15:54 +0000 UTC" firstStartedPulling="2025-10-13 21:15:55.838029346 +0000 UTC m=+272.756274431" lastFinishedPulling="2025-10-13 21:15:59.33587414 +0000 UTC m=+276.254119225" observedRunningTime="2025-10-13 21:15:59.898486279 +0000 UTC m=+276.816731364" watchObservedRunningTime="2025-10-13 21:15:59.901373123 +0000 UTC m=+276.819618208" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:01.541144 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:01.541445 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:01.583786 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:01.930814 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5bqtl" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:02.219311 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:02.219855 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:02.258425 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:16:02 crc kubenswrapper[4689]: I1013 21:16:02.959561 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6br62" Oct 13 21:16:03 crc kubenswrapper[4689]: I1013 21:16:03.967947 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:16:03 crc kubenswrapper[4689]: I1013 21:16:03.968195 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:16:04 crc kubenswrapper[4689]: I1013 21:16:04.010043 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:16:04 crc kubenswrapper[4689]: I1013 21:16:04.541851 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:16:04 crc kubenswrapper[4689]: I1013 21:16:04.541948 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:16:04 crc kubenswrapper[4689]: I1013 21:16:04.579608 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:16:04 crc kubenswrapper[4689]: I1013 21:16:04.958538 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mfrg8" Oct 13 21:16:04 crc kubenswrapper[4689]: I1013 21:16:04.968475 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5rfc2" Oct 13 21:17:23 crc kubenswrapper[4689]: I1013 21:17:23.859451 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:17:23 crc kubenswrapper[4689]: I1013 21:17:23.860002 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:17:53 crc kubenswrapper[4689]: I1013 21:17:53.859009 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:17:53 crc kubenswrapper[4689]: I1013 21:17:53.859773 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:18:23 crc kubenswrapper[4689]: I1013 21:18:23.859176 4689 patch_prober.go:28] interesting 
pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:18:23 crc kubenswrapper[4689]: I1013 21:18:23.860494 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:18:23 crc kubenswrapper[4689]: I1013 21:18:23.860640 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:18:23 crc kubenswrapper[4689]: I1013 21:18:23.862296 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a2f64fccdbff6f9fbf34a77ede4edac00241e08c4bc51864952122589c103b1a"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:18:23 crc kubenswrapper[4689]: I1013 21:18:23.862463 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://a2f64fccdbff6f9fbf34a77ede4edac00241e08c4bc51864952122589c103b1a" gracePeriod=600 Oct 13 21:18:24 crc kubenswrapper[4689]: I1013 21:18:24.688070 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="a2f64fccdbff6f9fbf34a77ede4edac00241e08c4bc51864952122589c103b1a" exitCode=0 Oct 13 21:18:24 crc kubenswrapper[4689]: I1013 21:18:24.688209 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"a2f64fccdbff6f9fbf34a77ede4edac00241e08c4bc51864952122589c103b1a"} Oct 13 21:18:24 crc kubenswrapper[4689]: I1013 21:18:24.688611 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"b8c850c25ce9ad448a6035ec03d9103c3aaadb9f60c108dca0caaf9e3d4833c4"} Oct 13 21:18:24 crc kubenswrapper[4689]: I1013 21:18:24.688652 4689 scope.go:117] "RemoveContainer" containerID="547439d253280be0a8d1b985c8b48e2d3ee14926e10277074a4356648d102340" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.777783 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4wb2c"] Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.780228 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.799023 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4wb2c"] Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.965631 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.965688 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5365d66-d62c-47e2-896e-76a33db74424-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.965724 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-registry-tls\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.965750 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5365d66-d62c-47e2-896e-76a33db74424-trusted-ca\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.965774 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5365d66-d62c-47e2-896e-76a33db74424-registry-certificates\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.965838 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-bound-sa-token\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.965952 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb5xr\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-kube-api-access-fb5xr\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.966007 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/c5365d66-d62c-47e2-896e-76a33db74424-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:27 crc kubenswrapper[4689]: I1013 21:18:27.983002 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.068987 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb5xr\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-kube-api-access-fb5xr\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.069053 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c5365d66-d62c-47e2-896e-76a33db74424-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.069138 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5365d66-d62c-47e2-896e-76a33db74424-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.069179 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-registry-tls\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.069210 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5365d66-d62c-47e2-896e-76a33db74424-trusted-ca\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.069236 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5365d66-d62c-47e2-896e-76a33db74424-registry-certificates\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.069291 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-bound-sa-token\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.070364 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c5365d66-d62c-47e2-896e-76a33db74424-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.075543 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c5365d66-d62c-47e2-896e-76a33db74424-registry-certificates\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.077968 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c5365d66-d62c-47e2-896e-76a33db74424-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.078403 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-registry-tls\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.078457 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5365d66-d62c-47e2-896e-76a33db74424-trusted-ca\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.092570 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-bound-sa-token\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.093209 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb5xr\" (UniqueName: \"kubernetes.io/projected/c5365d66-d62c-47e2-896e-76a33db74424-kube-api-access-fb5xr\") pod \"image-registry-66df7c8f76-4wb2c\" (UID: \"c5365d66-d62c-47e2-896e-76a33db74424\") " pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.094554 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.478106 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4wb2c"] Oct 13 21:18:28 crc kubenswrapper[4689]: W1013 21:18:28.484703 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5365d66_d62c_47e2_896e_76a33db74424.slice/crio-462edef3e5ca85c1c731f5efb037045c1ef741c8a3a8cceaa7de27e02eec6fe0 WatchSource:0}: Error finding container 462edef3e5ca85c1c731f5efb037045c1ef741c8a3a8cceaa7de27e02eec6fe0: Status 404 returned error can't find the container with id 462edef3e5ca85c1c731f5efb037045c1ef741c8a3a8cceaa7de27e02eec6fe0 Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.715019 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" event={"ID":"c5365d66-d62c-47e2-896e-76a33db74424","Type":"ContainerStarted","Data":"ea89232cc343335fc907363385a78236d844d942a15d28da5d480c94587b09e5"} Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.715435 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" event={"ID":"c5365d66-d62c-47e2-896e-76a33db74424","Type":"ContainerStarted","Data":"462edef3e5ca85c1c731f5efb037045c1ef741c8a3a8cceaa7de27e02eec6fe0"} Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.715477 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:28 crc kubenswrapper[4689]: I1013 21:18:28.732994 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" podStartSLOduration=1.732972408 podStartE2EDuration="1.732972408s" podCreationTimestamp="2025-10-13 21:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:18:28.729869448 +0000 UTC m=+425.648114543" watchObservedRunningTime="2025-10-13 21:18:28.732972408 +0000 UTC m=+425.651217493" Oct 13 21:18:48 crc kubenswrapper[4689]: I1013 21:18:48.102036 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-4wb2c" Oct 13 21:18:48 crc kubenswrapper[4689]: I1013 21:18:48.160374 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m87tp"] Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.212865 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" podUID="2c78b93f-8347-4c41-a948-bacab534efdf" containerName="registry" containerID="cri-o://4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9" gracePeriod=30 Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.603940 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692137 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-registry-tls\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692190 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-bound-sa-token\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692228 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlkkz\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-kube-api-access-rlkkz\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692265 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c78b93f-8347-4c41-a948-bacab534efdf-ca-trust-extracted\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692335 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-registry-certificates\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692359 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-trusted-ca\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692494 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.692537 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c78b93f-8347-4c41-a948-bacab534efdf-installation-pull-secrets\") pod \"2c78b93f-8347-4c41-a948-bacab534efdf\" (UID: \"2c78b93f-8347-4c41-a948-bacab534efdf\") " Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.693827 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.694140 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.699136 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.700438 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c78b93f-8347-4c41-a948-bacab534efdf-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.699996 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.702186 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-kube-api-access-rlkkz" (OuterVolumeSpecName: "kube-api-access-rlkkz") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "kube-api-access-rlkkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.704170 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.712029 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c78b93f-8347-4c41-a948-bacab534efdf-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "2c78b93f-8347-4c41-a948-bacab534efdf" (UID: "2c78b93f-8347-4c41-a948-bacab534efdf"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.793950 4689 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c78b93f-8347-4c41-a948-bacab534efdf-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.793988 4689 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.794000 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c78b93f-8347-4c41-a948-bacab534efdf-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.794009 4689 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c78b93f-8347-4c41-a948-bacab534efdf-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.794018 4689 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.794026 4689 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.794034 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlkkz\" (UniqueName: \"kubernetes.io/projected/2c78b93f-8347-4c41-a948-bacab534efdf-kube-api-access-rlkkz\") on node \"crc\" DevicePath \"\"" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.970865 4689 generic.go:334] "Generic (PLEG): container finished" podID="2c78b93f-8347-4c41-a948-bacab534efdf" containerID="4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9" exitCode=0 Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.970903 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.970919 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" event={"ID":"2c78b93f-8347-4c41-a948-bacab534efdf","Type":"ContainerDied","Data":"4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9"} Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.971348 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-m87tp" event={"ID":"2c78b93f-8347-4c41-a948-bacab534efdf","Type":"ContainerDied","Data":"8bea0beac10e5a775a987bdbca0b7dba803b39c4d2439732caed884c66672cbf"} Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.971365 4689 scope.go:117] "RemoveContainer" containerID="4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9" Oct 13 21:19:13 crc kubenswrapper[4689]: I1013 21:19:13.997940 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m87tp"] Oct 13 21:19:14 crc kubenswrapper[4689]: I1013 21:19:14.003465 4689 scope.go:117] "RemoveContainer" containerID="4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9" Oct 13 21:19:14 crc kubenswrapper[4689]: I1013 21:19:14.004492 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-m87tp"] Oct 13 21:19:14 crc kubenswrapper[4689]: E1013 21:19:14.005115 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9\": container with ID starting with 4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9 not found: ID does not exist" containerID="4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9" Oct 13 21:19:14 crc kubenswrapper[4689]: I1013 21:19:14.005164 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9"} err="failed to get container status \"4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9\": rpc error: code = NotFound desc = could not find container \"4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9\": container with ID starting with 4bae03277b120ee45b419bfbbda877496d9a50742623443dc767e020df1555a9 not found: ID does not exist" Oct 13 21:19:15 crc kubenswrapper[4689]: I1013 21:19:15.877047 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c78b93f-8347-4c41-a948-bacab534efdf" path="/var/lib/kubelet/pods/2c78b93f-8347-4c41-a948-bacab534efdf/volumes" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.191979 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-ndlw8"] Oct 13 21:20:47 crc kubenswrapper[4689]: E1013 21:20:47.192667 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c78b93f-8347-4c41-a948-bacab534efdf" containerName="registry" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.192678 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c78b93f-8347-4c41-a948-bacab534efdf" containerName="registry" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.192778 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c78b93f-8347-4c41-a948-bacab534efdf" containerName="registry" Oct 13 21:20:47 crc 
kubenswrapper[4689]: I1013 21:20:47.193169 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.195338 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.195419 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.195469 4689 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-cw4h2" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.212514 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g6vgs"] Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.213293 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-g6vgs" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.216133 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-52xlv"] Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.216636 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.219791 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-ndlw8"] Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.219871 4689 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-mwv7s" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.221668 4689 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-l4tx2" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.227150 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g6vgs"] Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.236500 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-52xlv"] Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.313753 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6zv7\" (UniqueName: \"kubernetes.io/projected/afb8356c-40a3-4270-bbe2-644b8b14482f-kube-api-access-r6zv7\") pod \"cert-manager-cainjector-7f985d654d-ndlw8\" (UID: \"afb8356c-40a3-4270-bbe2-644b8b14482f\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.313832 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr8w7\" (UniqueName: \"kubernetes.io/projected/8e5b1294-2785-4023-857f-e404eaed07fb-kube-api-access-qr8w7\") pod \"cert-manager-5b446d88c5-g6vgs\" (UID: \"8e5b1294-2785-4023-857f-e404eaed07fb\") " pod="cert-manager/cert-manager-5b446d88c5-g6vgs" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.314015 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw8v8\" (UniqueName: \"kubernetes.io/projected/693e1ab8-5677-4bdd-bb02-b8540de9513a-kube-api-access-xw8v8\") pod \"cert-manager-webhook-5655c58dd6-52xlv\" (UID: 
\"693e1ab8-5677-4bdd-bb02-b8540de9513a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.415373 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6zv7\" (UniqueName: \"kubernetes.io/projected/afb8356c-40a3-4270-bbe2-644b8b14482f-kube-api-access-r6zv7\") pod \"cert-manager-cainjector-7f985d654d-ndlw8\" (UID: \"afb8356c-40a3-4270-bbe2-644b8b14482f\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.415769 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr8w7\" (UniqueName: \"kubernetes.io/projected/8e5b1294-2785-4023-857f-e404eaed07fb-kube-api-access-qr8w7\") pod \"cert-manager-5b446d88c5-g6vgs\" (UID: \"8e5b1294-2785-4023-857f-e404eaed07fb\") " pod="cert-manager/cert-manager-5b446d88c5-g6vgs" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.415810 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw8v8\" (UniqueName: \"kubernetes.io/projected/693e1ab8-5677-4bdd-bb02-b8540de9513a-kube-api-access-xw8v8\") pod \"cert-manager-webhook-5655c58dd6-52xlv\" (UID: \"693e1ab8-5677-4bdd-bb02-b8540de9513a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.432416 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6zv7\" (UniqueName: \"kubernetes.io/projected/afb8356c-40a3-4270-bbe2-644b8b14482f-kube-api-access-r6zv7\") pod \"cert-manager-cainjector-7f985d654d-ndlw8\" (UID: \"afb8356c-40a3-4270-bbe2-644b8b14482f\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.434039 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw8v8\" (UniqueName: \"kubernetes.io/projected/693e1ab8-5677-4bdd-bb02-b8540de9513a-kube-api-access-xw8v8\") pod \"cert-manager-webhook-5655c58dd6-52xlv\" (UID: \"693e1ab8-5677-4bdd-bb02-b8540de9513a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.434124 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr8w7\" (UniqueName: \"kubernetes.io/projected/8e5b1294-2785-4023-857f-e404eaed07fb-kube-api-access-qr8w7\") pod \"cert-manager-5b446d88c5-g6vgs\" (UID: \"8e5b1294-2785-4023-857f-e404eaed07fb\") " pod="cert-manager/cert-manager-5b446d88c5-g6vgs" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.507965 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.529445 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-g6vgs" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.542883 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.797079 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-52xlv"] Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.803375 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.936693 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-g6vgs"] Oct 13 21:20:47 crc kubenswrapper[4689]: I1013 21:20:47.969092 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-ndlw8"] Oct 13 21:20:47 crc kubenswrapper[4689]: W1013 21:20:47.974709 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafb8356c_40a3_4270_bbe2_644b8b14482f.slice/crio-26d19cb822ef3c171cc0cd6be370b1337f14ea5282daef873ab94c62e902a3de WatchSource:0}: Error finding container 26d19cb822ef3c171cc0cd6be370b1337f14ea5282daef873ab94c62e902a3de: Status 404 returned error can't find the container with id 26d19cb822ef3c171cc0cd6be370b1337f14ea5282daef873ab94c62e902a3de Oct 13 21:20:48 crc kubenswrapper[4689]: I1013 21:20:48.505456 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-g6vgs" event={"ID":"8e5b1294-2785-4023-857f-e404eaed07fb","Type":"ContainerStarted","Data":"3184537e6dd40d44050d8afb903cc0ad3ad8286b7986832f41be7ad92c3864dd"} Oct 13 21:20:48 crc kubenswrapper[4689]: I1013 21:20:48.506414 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" event={"ID":"693e1ab8-5677-4bdd-bb02-b8540de9513a","Type":"ContainerStarted","Data":"a57451e197cc2c2e5c83061f46aebc7eda8fe522f8ad28f25c9820641ed36380"} Oct 13 21:20:48 crc kubenswrapper[4689]: I1013 21:20:48.507351 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" event={"ID":"afb8356c-40a3-4270-bbe2-644b8b14482f","Type":"ContainerStarted","Data":"26d19cb822ef3c171cc0cd6be370b1337f14ea5282daef873ab94c62e902a3de"} Oct 13 21:20:51 crc kubenswrapper[4689]: I1013 21:20:51.529071 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-g6vgs" event={"ID":"8e5b1294-2785-4023-857f-e404eaed07fb","Type":"ContainerStarted","Data":"71c6d08b3d8b802678e1150de6c1e799695bc69fe50450bc32c24f612e05b882"} Oct 13 21:20:51 crc kubenswrapper[4689]: I1013 21:20:51.530685 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" event={"ID":"693e1ab8-5677-4bdd-bb02-b8540de9513a","Type":"ContainerStarted","Data":"8f1e5f18cc975e6e1545d21ddbdb5a3bce8436de4d35df4495de8e30704584ac"} Oct 13 21:20:51 crc kubenswrapper[4689]: I1013 21:20:51.530829 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" Oct 13 21:20:51 crc kubenswrapper[4689]: I1013 21:20:51.532343 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" event={"ID":"afb8356c-40a3-4270-bbe2-644b8b14482f","Type":"ContainerStarted","Data":"abddbbdc5c5902388618e78d12f40e13adedf9b5ef7db0f2c57c4a2294596f80"} Oct 13 21:20:51 crc kubenswrapper[4689]: I1013 21:20:51.548655 4689 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-g6vgs" podStartSLOduration=1.491337572 podStartE2EDuration="4.548629777s" podCreationTimestamp="2025-10-13 21:20:47 +0000 UTC" firstStartedPulling="2025-10-13 21:20:47.945736782 +0000 UTC m=+564.863981867" lastFinishedPulling="2025-10-13 21:20:51.003028987 +0000 UTC m=+567.921274072" observedRunningTime="2025-10-13 21:20:51.544397061 +0000 UTC m=+568.462642196" watchObservedRunningTime="2025-10-13 21:20:51.548629777 +0000 UTC m=+568.466874872" Oct 13 21:20:51 crc kubenswrapper[4689]: I1013 21:20:51.572845 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-ndlw8" podStartSLOduration=1.546792599 podStartE2EDuration="4.572822835s" podCreationTimestamp="2025-10-13 21:20:47 +0000 UTC" firstStartedPulling="2025-10-13 21:20:47.977052642 +0000 UTC m=+564.895297727" lastFinishedPulling="2025-10-13 21:20:51.003082878 +0000 UTC m=+567.921327963" observedRunningTime="2025-10-13 21:20:51.566318659 +0000 UTC m=+568.484563754" watchObservedRunningTime="2025-10-13 21:20:51.572822835 +0000 UTC m=+568.491067930" Oct 13 21:20:51 crc kubenswrapper[4689]: I1013 21:20:51.589259 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" podStartSLOduration=1.313149131 podStartE2EDuration="4.589238908s" podCreationTimestamp="2025-10-13 21:20:47 +0000 UTC" firstStartedPulling="2025-10-13 21:20:47.803070927 +0000 UTC m=+564.721316012" lastFinishedPulling="2025-10-13 21:20:51.079160704 +0000 UTC m=+567.997405789" observedRunningTime="2025-10-13 21:20:51.586423234 +0000 UTC m=+568.504668329" watchObservedRunningTime="2025-10-13 21:20:51.589238908 +0000 UTC m=+568.507483993" Oct 13 21:20:53 crc kubenswrapper[4689]: I1013 21:20:53.858714 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:20:53 crc kubenswrapper[4689]: I1013 21:20:53.859076 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:20:57 crc kubenswrapper[4689]: I1013 21:20:57.547396 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-52xlv" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.002388 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xml6c"] Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.002804 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-controller" containerID="cri-o://2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.003115 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-ovn-metrics" 
containerID="cri-o://673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.003179 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-node" containerID="cri-o://0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.003220 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-acl-logging" containerID="cri-o://28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.003051 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="northd" containerID="cri-o://de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.003451 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="sbdb" containerID="cri-o://a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.003547 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="nbdb" containerID="cri-o://9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.035745 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" containerID="cri-o://3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" gracePeriod=30 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.395973 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/3.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.397924 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovn-acl-logging/0.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.398390 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovn-controller/0.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.398746 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442526 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8qkdj"] Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442768 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442782 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442790 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="sbdb" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442796 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="sbdb" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442806 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kubecfg-setup" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442812 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kubecfg-setup" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442821 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="northd" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442827 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="northd" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442835 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="nbdb" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442841 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="nbdb" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442849 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-acl-logging" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442854 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-acl-logging" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442864 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-node" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442869 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-node" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442878 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-ovn-metrics" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442884 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-ovn-metrics" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442891 4689 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442896 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442903 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442910 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442918 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442925 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.442933 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.442939 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443018 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443025 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443034 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-acl-logging" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443040 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443046 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="sbdb" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443054 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-node" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443060 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="nbdb" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443069 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="kube-rbac-proxy-ovn-metrics" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443079 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443086 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovn-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: 
I1013 21:20:58.443093 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="northd" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.443174 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443181 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.443272 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerName="ovnkube-controller" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.444749 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456361 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456439 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-node-log\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456465 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-slash\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456489 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-ovn-kubernetes\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456517 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-config\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456545 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-kubelet\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456573 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-ovn\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456616 4689 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-netd\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456689 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-script-lib\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456730 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-bin\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456753 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-netns\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456773 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-var-lib-openvswitch\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456794 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-etc-openvswitch\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456817 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-openvswitch\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456846 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-log-socket\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456875 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5736acbe-9793-447e-9e22-76b0f407bfb7-ovn-node-metrics-cert\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456900 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-env-overrides\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456937 4689 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qn8m\" (UniqueName: \"kubernetes.io/projected/5736acbe-9793-447e-9e22-76b0f407bfb7-kube-api-access-9qn8m\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456965 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-systemd\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.456991 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-systemd-units\") pod \"5736acbe-9793-447e-9e22-76b0f407bfb7\" (UID: \"5736acbe-9793-447e-9e22-76b0f407bfb7\") " Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457201 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457226 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457237 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457254 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-log-socket" (OuterVolumeSpecName: "log-socket") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457264 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-node-log" (OuterVolumeSpecName: "node-log") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457281 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457289 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457323 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457314 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457359 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457349 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457379 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-slash" (OuterVolumeSpecName: "host-slash") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457400 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457701 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.457732 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.458854 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.459059 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.464688 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5736acbe-9793-447e-9e22-76b0f407bfb7-kube-api-access-9qn8m" (OuterVolumeSpecName: "kube-api-access-9qn8m") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "kube-api-access-9qn8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.465220 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5736acbe-9793-447e-9e22-76b0f407bfb7-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.475400 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "5736acbe-9793-447e-9e22-76b0f407bfb7" (UID: "5736acbe-9793-447e-9e22-76b0f407bfb7"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558216 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-ovn\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558263 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-log-socket\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558286 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-systemd-units\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558321 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-ovnkube-script-lib\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558338 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-slash\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558352 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-var-lib-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558367 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-node-log\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558385 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wvlp\" (UniqueName: \"kubernetes.io/projected/095f0be4-305d-46ca-b590-31b9d246653d-kube-api-access-4wvlp\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558400 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-kubelet\") pod 
\"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558441 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-run-ovn-kubernetes\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558472 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-etc-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558486 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-run-netns\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558500 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-cni-netd\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558517 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-cni-bin\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558532 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-systemd\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558546 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558562 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558593 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-env-overrides\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558634 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-ovnkube-config\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558651 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/095f0be4-305d-46ca-b590-31b9d246653d-ovn-node-metrics-cert\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558716 4689 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558727 4689 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558736 4689 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558744 4689 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558753 4689 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558761 4689 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558769 4689 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558777 4689 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-log-socket\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558785 4689 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558793 
4689 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5736acbe-9793-447e-9e22-76b0f407bfb7-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558801 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qn8m\" (UniqueName: \"kubernetes.io/projected/5736acbe-9793-447e-9e22-76b0f407bfb7-kube-api-access-9qn8m\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558809 4689 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558818 4689 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558828 4689 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558838 4689 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-node-log\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558846 4689 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-slash\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558854 4689 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558862 4689 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5736acbe-9793-447e-9e22-76b0f407bfb7-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558869 4689 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.558878 4689 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5736acbe-9793-447e-9e22-76b0f407bfb7-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.574513 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovnkube-controller/3.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.576703 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovn-acl-logging/0.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577301 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xml6c_5736acbe-9793-447e-9e22-76b0f407bfb7/ovn-controller/0.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577654 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" exitCode=0 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577675 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" exitCode=0 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577685 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" exitCode=0 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577692 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" exitCode=0 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577698 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" exitCode=0 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577704 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" exitCode=0 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577711 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" exitCode=143 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577717 4689 generic.go:334] "Generic (PLEG): container finished" podID="5736acbe-9793-447e-9e22-76b0f407bfb7" containerID="2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" exitCode=143 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577794 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577818 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577852 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577871 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577884 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577896 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577907 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577920 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577934 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577941 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577948 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577954 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577961 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577967 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577974 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577981 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.577989 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578001 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578009 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578016 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578022 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578029 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578035 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578041 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578047 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578053 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578059 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578070 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578080 4689 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578088 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578094 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578101 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578107 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578113 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578119 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578125 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578131 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578137 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578147 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xml6c" event={"ID":"5736acbe-9793-447e-9e22-76b0f407bfb7","Type":"ContainerDied","Data":"e36d3a3bd648cf9b359177af5b7bb1867f9b32b01bb0ec44207846f1d70c27be"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578158 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578165 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578171 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578178 4689 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578184 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578190 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578196 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578202 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578209 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578216 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.578232 4689 scope.go:117] "RemoveContainer" containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.585101 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/2.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.585572 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/1.log" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.585683 4689 generic.go:334] "Generic (PLEG): container finished" podID="632b68ca-d2a4-4570-a0a2-8ea8d204fb59" containerID="5385df0cedf34a0e0af7240e6b65d843bf09de075d7669b439235c390b995e45" exitCode=2 Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.585744 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerDied","Data":"5385df0cedf34a0e0af7240e6b65d843bf09de075d7669b439235c390b995e45"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.585771 4689 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99"} Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.586343 4689 scope.go:117] "RemoveContainer" containerID="5385df0cedf34a0e0af7240e6b65d843bf09de075d7669b439235c390b995e45" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.586722 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus 
pod=multus-xr7rr_openshift-multus(632b68ca-d2a4-4570-a0a2-8ea8d204fb59)\"" pod="openshift-multus/multus-xr7rr" podUID="632b68ca-d2a4-4570-a0a2-8ea8d204fb59" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.606870 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.628305 4689 scope.go:117] "RemoveContainer" containerID="a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.638789 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xml6c"] Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.647858 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xml6c"] Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.654713 4689 scope.go:117] "RemoveContainer" containerID="9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660372 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-log-socket\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660424 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-systemd-units\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660457 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-ovnkube-script-lib\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660478 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-slash\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660495 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-var-lib-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660510 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-node-log\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660512 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-log-socket\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660529 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wvlp\" (UniqueName: \"kubernetes.io/projected/095f0be4-305d-46ca-b590-31b9d246653d-kube-api-access-4wvlp\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660569 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-systemd-units\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660599 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-slash\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660676 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-var-lib-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660697 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-kubelet\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660808 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-run-ovn-kubernetes\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660852 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-etc-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660881 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-run-netns\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660900 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-cni-netd\") pod 
\"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660927 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-cni-bin\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660943 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-systemd\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660961 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660978 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.660996 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-env-overrides\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661020 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-etc-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661050 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-ovnkube-config\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661055 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-node-log\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661096 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/095f0be4-305d-46ca-b590-31b9d246653d-ovn-node-metrics-cert\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661175 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-ovn\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661217 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-ovnkube-script-lib\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661254 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-run-ovn-kubernetes\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661802 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-ovn\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661828 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661845 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-run-netns\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661861 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-cni-netd\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661873 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-systemd\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661894 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-run-openvswitch\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661916 
4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-kubelet\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661934 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/095f0be4-305d-46ca-b590-31b9d246653d-host-cni-bin\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.661964 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-ovnkube-config\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.662288 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/095f0be4-305d-46ca-b590-31b9d246653d-env-overrides\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.669845 4689 scope.go:117] "RemoveContainer" containerID="de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.670092 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/095f0be4-305d-46ca-b590-31b9d246653d-ovn-node-metrics-cert\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.678155 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wvlp\" (UniqueName: \"kubernetes.io/projected/095f0be4-305d-46ca-b590-31b9d246653d-kube-api-access-4wvlp\") pod \"ovnkube-node-8qkdj\" (UID: \"095f0be4-305d-46ca-b590-31b9d246653d\") " pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.682464 4689 scope.go:117] "RemoveContainer" containerID="673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.693285 4689 scope.go:117] "RemoveContainer" containerID="0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.704902 4689 scope.go:117] "RemoveContainer" containerID="28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.718261 4689 scope.go:117] "RemoveContainer" containerID="2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.729641 4689 scope.go:117] "RemoveContainer" containerID="108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.742515 4689 scope.go:117] "RemoveContainer" containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.743010 4689 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": container with ID starting with 3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d not found: ID does not exist" containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.743056 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} err="failed to get container status \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": rpc error: code = NotFound desc = could not find container \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": container with ID starting with 3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.743091 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.743470 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": container with ID starting with 7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1 not found: ID does not exist" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.743515 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} err="failed to get container status \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": rpc error: code = NotFound desc = could not find container \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": container with ID starting with 7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.743548 4689 scope.go:117] "RemoveContainer" containerID="a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.743954 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": container with ID starting with a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029 not found: ID does not exist" containerID="a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.743978 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} err="failed to get container status \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": rpc error: code = NotFound desc = could not find container \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": container with ID starting with a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.743994 4689 scope.go:117] "RemoveContainer" 
containerID="9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.744253 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": container with ID starting with 9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef not found: ID does not exist" containerID="9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.744284 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} err="failed to get container status \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": rpc error: code = NotFound desc = could not find container \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": container with ID starting with 9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.744306 4689 scope.go:117] "RemoveContainer" containerID="de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.744542 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": container with ID starting with de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c not found: ID does not exist" containerID="de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.744565 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} err="failed to get container status \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": rpc error: code = NotFound desc = could not find container \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": container with ID starting with de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.744579 4689 scope.go:117] "RemoveContainer" containerID="673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.744848 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": container with ID starting with 673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397 not found: ID does not exist" containerID="673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.744881 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} err="failed to get container status \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": rpc error: code = NotFound desc = could not find container \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": container with ID starting with 
673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.744901 4689 scope.go:117] "RemoveContainer" containerID="0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.745133 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": container with ID starting with 0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939 not found: ID does not exist" containerID="0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.745163 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} err="failed to get container status \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": rpc error: code = NotFound desc = could not find container \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": container with ID starting with 0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.745184 4689 scope.go:117] "RemoveContainer" containerID="28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.745510 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": container with ID starting with 28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934 not found: ID does not exist" containerID="28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.745538 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} err="failed to get container status \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": rpc error: code = NotFound desc = could not find container \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": container with ID starting with 28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.745557 4689 scope.go:117] "RemoveContainer" containerID="2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.746002 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": container with ID starting with 2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7 not found: ID does not exist" containerID="2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.746081 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} err="failed to get container status \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": rpc 
error: code = NotFound desc = could not find container \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": container with ID starting with 2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.746170 4689 scope.go:117] "RemoveContainer" containerID="108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1" Oct 13 21:20:58 crc kubenswrapper[4689]: E1013 21:20:58.746532 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": container with ID starting with 108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1 not found: ID does not exist" containerID="108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.746561 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} err="failed to get container status \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": rpc error: code = NotFound desc = could not find container \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": container with ID starting with 108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.746643 4689 scope.go:117] "RemoveContainer" containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.746937 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} err="failed to get container status \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": rpc error: code = NotFound desc = could not find container \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": container with ID starting with 3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.746974 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.747344 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} err="failed to get container status \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": rpc error: code = NotFound desc = could not find container \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": container with ID starting with 7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.747417 4689 scope.go:117] "RemoveContainer" containerID="a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.747812 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} err="failed to get container status \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": rpc 
error: code = NotFound desc = could not find container \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": container with ID starting with a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.747837 4689 scope.go:117] "RemoveContainer" containerID="9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.748133 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} err="failed to get container status \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": rpc error: code = NotFound desc = could not find container \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": container with ID starting with 9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.748171 4689 scope.go:117] "RemoveContainer" containerID="de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.748461 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} err="failed to get container status \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": rpc error: code = NotFound desc = could not find container \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": container with ID starting with de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.748491 4689 scope.go:117] "RemoveContainer" containerID="673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.748746 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} err="failed to get container status \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": rpc error: code = NotFound desc = could not find container \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": container with ID starting with 673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.748774 4689 scope.go:117] "RemoveContainer" containerID="0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.749018 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} err="failed to get container status \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": rpc error: code = NotFound desc = could not find container \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": container with ID starting with 0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.749041 4689 scope.go:117] "RemoveContainer" containerID="28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" Oct 13 21:20:58 crc 
kubenswrapper[4689]: I1013 21:20:58.749301 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} err="failed to get container status \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": rpc error: code = NotFound desc = could not find container \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": container with ID starting with 28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.749327 4689 scope.go:117] "RemoveContainer" containerID="2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.749666 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} err="failed to get container status \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": rpc error: code = NotFound desc = could not find container \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": container with ID starting with 2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.749690 4689 scope.go:117] "RemoveContainer" containerID="108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.749975 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} err="failed to get container status \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": rpc error: code = NotFound desc = could not find container \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": container with ID starting with 108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.750015 4689 scope.go:117] "RemoveContainer" containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.750282 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} err="failed to get container status \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": rpc error: code = NotFound desc = could not find container \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": container with ID starting with 3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.750311 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.750646 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} err="failed to get container status \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": rpc error: code = NotFound desc = could not find container \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": container with ID 
starting with 7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.750681 4689 scope.go:117] "RemoveContainer" containerID="a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.751024 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} err="failed to get container status \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": rpc error: code = NotFound desc = could not find container \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": container with ID starting with a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.751057 4689 scope.go:117] "RemoveContainer" containerID="9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.751353 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} err="failed to get container status \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": rpc error: code = NotFound desc = could not find container \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": container with ID starting with 9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.751385 4689 scope.go:117] "RemoveContainer" containerID="de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.751712 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} err="failed to get container status \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": rpc error: code = NotFound desc = could not find container \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": container with ID starting with de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.751741 4689 scope.go:117] "RemoveContainer" containerID="673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.752104 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} err="failed to get container status \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": rpc error: code = NotFound desc = could not find container \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": container with ID starting with 673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.752134 4689 scope.go:117] "RemoveContainer" containerID="0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.752416 4689 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} err="failed to get container status \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": rpc error: code = NotFound desc = could not find container \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": container with ID starting with 0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.752447 4689 scope.go:117] "RemoveContainer" containerID="28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.752776 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} err="failed to get container status \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": rpc error: code = NotFound desc = could not find container \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": container with ID starting with 28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.752802 4689 scope.go:117] "RemoveContainer" containerID="2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.753039 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} err="failed to get container status \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": rpc error: code = NotFound desc = could not find container \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": container with ID starting with 2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.753071 4689 scope.go:117] "RemoveContainer" containerID="108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.753386 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} err="failed to get container status \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": rpc error: code = NotFound desc = could not find container \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": container with ID starting with 108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.753412 4689 scope.go:117] "RemoveContainer" containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.753682 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} err="failed to get container status \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": rpc error: code = NotFound desc = could not find container \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": container with ID starting with 3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d not found: ID does not exist" Oct 
13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.753711 4689 scope.go:117] "RemoveContainer" containerID="7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.753975 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1"} err="failed to get container status \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": rpc error: code = NotFound desc = could not find container \"7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1\": container with ID starting with 7a9a0133a711d396f3e56ac647a8ab3472001703dfc0944ae2391a016d0fc9b1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.754028 4689 scope.go:117] "RemoveContainer" containerID="a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.754283 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029"} err="failed to get container status \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": rpc error: code = NotFound desc = could not find container \"a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029\": container with ID starting with a7a57895a117719cd12ae3738dc346d814ac3e9806e21325cb2b808d9a7fa029 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.754312 4689 scope.go:117] "RemoveContainer" containerID="9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.754623 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef"} err="failed to get container status \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": rpc error: code = NotFound desc = could not find container \"9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef\": container with ID starting with 9319e01d1acdc0c17ada51cd7d56ef2526e04e758b0dcb8477f35e6bd7aba5ef not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.754664 4689 scope.go:117] "RemoveContainer" containerID="de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.755045 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c"} err="failed to get container status \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": rpc error: code = NotFound desc = could not find container \"de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c\": container with ID starting with de21e7e332c1eee739400899c13a7c248b4a9c1fc54d39664d24fbe048628f7c not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.755073 4689 scope.go:117] "RemoveContainer" containerID="673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.755310 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397"} err="failed to get container status 
\"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": rpc error: code = NotFound desc = could not find container \"673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397\": container with ID starting with 673b9a2721c99bec56c9163d18b82f7df69efe9cc78a3947573a5c81b78d3397 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.755346 4689 scope.go:117] "RemoveContainer" containerID="0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.755803 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939"} err="failed to get container status \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": rpc error: code = NotFound desc = could not find container \"0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939\": container with ID starting with 0843fe4c0ced67c51cf47c76135e691054aa718112ca578b0b6279db3e532939 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.755838 4689 scope.go:117] "RemoveContainer" containerID="28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.756166 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934"} err="failed to get container status \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": rpc error: code = NotFound desc = could not find container \"28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934\": container with ID starting with 28a8d97ca1dd44e779c3b48e1c7e70432d1274730794bcf06c7ed4d4ff867934 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.756208 4689 scope.go:117] "RemoveContainer" containerID="2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.756701 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7"} err="failed to get container status \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": rpc error: code = NotFound desc = could not find container \"2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7\": container with ID starting with 2ba6e698bef715ab3c29f192e77d570917048e220f4051ee1684fda48205fdf7 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.756728 4689 scope.go:117] "RemoveContainer" containerID="108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.757118 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1"} err="failed to get container status \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": rpc error: code = NotFound desc = could not find container \"108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1\": container with ID starting with 108de11948788304a3575ffb8f7b4273dc7fc6dbde53865880aec305ed8cd6e1 not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.757156 4689 scope.go:117] "RemoveContainer" 
containerID="3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.757632 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:20:58 crc kubenswrapper[4689]: I1013 21:20:58.757748 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d"} err="failed to get container status \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": rpc error: code = NotFound desc = could not find container \"3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d\": container with ID starting with 3e5fc3aedbc5949d55ceefcf08a71b551f21b8ba2dda342b55b91917f2c9c80d not found: ID does not exist" Oct 13 21:20:58 crc kubenswrapper[4689]: W1013 21:20:58.776437 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod095f0be4_305d_46ca_b590_31b9d246653d.slice/crio-040d3d74f6d539b228d40bc607b9cdbdd6c60344fc2c3b5050e3b3e767fa5f6a WatchSource:0}: Error finding container 040d3d74f6d539b228d40bc607b9cdbdd6c60344fc2c3b5050e3b3e767fa5f6a: Status 404 returned error can't find the container with id 040d3d74f6d539b228d40bc607b9cdbdd6c60344fc2c3b5050e3b3e767fa5f6a Oct 13 21:20:59 crc kubenswrapper[4689]: I1013 21:20:59.592367 4689 generic.go:334] "Generic (PLEG): container finished" podID="095f0be4-305d-46ca-b590-31b9d246653d" containerID="6140a99de41d6de09dd0c4e2af6c30b1aad6499a93a574122c05c34ab488c0c9" exitCode=0 Oct 13 21:20:59 crc kubenswrapper[4689]: I1013 21:20:59.592416 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerDied","Data":"6140a99de41d6de09dd0c4e2af6c30b1aad6499a93a574122c05c34ab488c0c9"} Oct 13 21:20:59 crc kubenswrapper[4689]: I1013 21:20:59.592454 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"040d3d74f6d539b228d40bc607b9cdbdd6c60344fc2c3b5050e3b3e767fa5f6a"} Oct 13 21:20:59 crc kubenswrapper[4689]: I1013 21:20:59.874910 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5736acbe-9793-447e-9e22-76b0f407bfb7" path="/var/lib/kubelet/pods/5736acbe-9793-447e-9e22-76b0f407bfb7/volumes" Oct 13 21:21:00 crc kubenswrapper[4689]: I1013 21:21:00.600363 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"a1e03a493b7a751531b09351c08ef7977c7bd125aaf2605f2a8ab83b34c52ca7"} Oct 13 21:21:00 crc kubenswrapper[4689]: I1013 21:21:00.600401 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"a415b13810edc10d0696c5a19b13e2b44406886d51012760b85a3d832058ca4d"} Oct 13 21:21:00 crc kubenswrapper[4689]: I1013 21:21:00.600411 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"5a2c8f777263439235786469c925873f19544cdc8b7bde9737a36ac517608569"} Oct 13 21:21:00 crc kubenswrapper[4689]: I1013 
21:21:00.600421 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"f4c3e7c83310f63bf98a07fa21b9f82d6377c0bd611aac8fb5af90bc3d025a23"} Oct 13 21:21:00 crc kubenswrapper[4689]: I1013 21:21:00.600429 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"94ce9b57a53109a28a2b754b8dd4e619696da2ec2dbc71b3c73968c9ce6cd439"} Oct 13 21:21:00 crc kubenswrapper[4689]: I1013 21:21:00.600437 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"c867e9cc17fe803da4a4fc7795ccc9d327fdd59cab59c978f00dea4540d9e2c6"} Oct 13 21:21:03 crc kubenswrapper[4689]: I1013 21:21:03.630727 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"f71c202ab85de1a53bb2f3b0d44c1a484391e22ca9d8557bea68d97c1f506cc8"} Oct 13 21:21:05 crc kubenswrapper[4689]: I1013 21:21:05.645288 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" event={"ID":"095f0be4-305d-46ca-b590-31b9d246653d","Type":"ContainerStarted","Data":"1d6d2359a0ac8e862504286258f281edd6c6b22f7ffa9387dbe285b9d1caf8ed"} Oct 13 21:21:05 crc kubenswrapper[4689]: I1013 21:21:05.645994 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:21:05 crc kubenswrapper[4689]: I1013 21:21:05.677849 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:21:05 crc kubenswrapper[4689]: I1013 21:21:05.679071 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" podStartSLOduration=7.679061018 podStartE2EDuration="7.679061018s" podCreationTimestamp="2025-10-13 21:20:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:21:05.674914154 +0000 UTC m=+582.593159249" watchObservedRunningTime="2025-10-13 21:21:05.679061018 +0000 UTC m=+582.597306103" Oct 13 21:21:06 crc kubenswrapper[4689]: I1013 21:21:06.652953 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:21:06 crc kubenswrapper[4689]: I1013 21:21:06.652997 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:21:06 crc kubenswrapper[4689]: I1013 21:21:06.698130 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:21:12 crc kubenswrapper[4689]: I1013 21:21:12.867842 4689 scope.go:117] "RemoveContainer" containerID="5385df0cedf34a0e0af7240e6b65d843bf09de075d7669b439235c390b995e45" Oct 13 21:21:12 crc kubenswrapper[4689]: E1013 21:21:12.870711 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-xr7rr_openshift-multus(632b68ca-d2a4-4570-a0a2-8ea8d204fb59)\"" 
pod="openshift-multus/multus-xr7rr" podUID="632b68ca-d2a4-4570-a0a2-8ea8d204fb59" Oct 13 21:21:23 crc kubenswrapper[4689]: I1013 21:21:23.859008 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:21:23 crc kubenswrapper[4689]: I1013 21:21:23.859905 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:21:24 crc kubenswrapper[4689]: I1013 21:21:24.073249 4689 scope.go:117] "RemoveContainer" containerID="99a0f1440b3ddf29def15a59eb83538501635a83344f4310c08091290ffd6d99" Oct 13 21:21:24 crc kubenswrapper[4689]: I1013 21:21:24.765012 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/2.log" Oct 13 21:21:26 crc kubenswrapper[4689]: I1013 21:21:26.867194 4689 scope.go:117] "RemoveContainer" containerID="5385df0cedf34a0e0af7240e6b65d843bf09de075d7669b439235c390b995e45" Oct 13 21:21:27 crc kubenswrapper[4689]: I1013 21:21:27.812486 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xr7rr_632b68ca-d2a4-4570-a0a2-8ea8d204fb59/kube-multus/2.log" Oct 13 21:21:27 crc kubenswrapper[4689]: I1013 21:21:27.812955 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xr7rr" event={"ID":"632b68ca-d2a4-4570-a0a2-8ea8d204fb59","Type":"ContainerStarted","Data":"a03296f0c10b37a1547c0f937209af55068419384125724d8fc0744d2a170125"} Oct 13 21:21:28 crc kubenswrapper[4689]: I1013 21:21:28.786570 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8qkdj" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.438544 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm"] Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.440110 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.442428 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.448386 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm"] Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.521845 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.521930 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.522025 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4xl6\" (UniqueName: \"kubernetes.io/projected/d77292a7-4976-429d-a389-525109ea00b2-kube-api-access-q4xl6\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.623791 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4xl6\" (UniqueName: \"kubernetes.io/projected/d77292a7-4976-429d-a389-525109ea00b2-kube-api-access-q4xl6\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.623892 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.623927 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.624380 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.624440 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.646488 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4xl6\" (UniqueName: \"kubernetes.io/projected/d77292a7-4976-429d-a389-525109ea00b2-kube-api-access-q4xl6\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.763269 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:36 crc kubenswrapper[4689]: I1013 21:21:36.958283 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm"] Oct 13 21:21:37 crc kubenswrapper[4689]: I1013 21:21:37.876910 4689 generic.go:334] "Generic (PLEG): container finished" podID="d77292a7-4976-429d-a389-525109ea00b2" containerID="6edb9a745dc58e44bf24dd9407c64f664b9fc14b6a23291d7345bf118c419efc" exitCode=0 Oct 13 21:21:37 crc kubenswrapper[4689]: I1013 21:21:37.876986 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" event={"ID":"d77292a7-4976-429d-a389-525109ea00b2","Type":"ContainerDied","Data":"6edb9a745dc58e44bf24dd9407c64f664b9fc14b6a23291d7345bf118c419efc"} Oct 13 21:21:37 crc kubenswrapper[4689]: I1013 21:21:37.877026 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" event={"ID":"d77292a7-4976-429d-a389-525109ea00b2","Type":"ContainerStarted","Data":"0bd261b891cd5519545078e9349ab79c2773b3765ec86fc7c19c192482ce5c5d"} Oct 13 21:21:39 crc kubenswrapper[4689]: I1013 21:21:39.889783 4689 generic.go:334] "Generic (PLEG): container finished" podID="d77292a7-4976-429d-a389-525109ea00b2" containerID="a8c27fb1018d608f63e37ef51959932eaf45bf4422345b10e6ac918c065ca76e" exitCode=0 Oct 13 21:21:39 crc kubenswrapper[4689]: I1013 21:21:39.890031 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" event={"ID":"d77292a7-4976-429d-a389-525109ea00b2","Type":"ContainerDied","Data":"a8c27fb1018d608f63e37ef51959932eaf45bf4422345b10e6ac918c065ca76e"} Oct 13 21:21:40 crc kubenswrapper[4689]: I1013 21:21:40.900684 4689 generic.go:334] "Generic (PLEG): container finished" podID="d77292a7-4976-429d-a389-525109ea00b2" containerID="babadaeae48933abcbeb7614818999f5435d735bea42452d8c3d166b59f05494" exitCode=0 Oct 13 21:21:40 crc kubenswrapper[4689]: I1013 
21:21:40.900851 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" event={"ID":"d77292a7-4976-429d-a389-525109ea00b2","Type":"ContainerDied","Data":"babadaeae48933abcbeb7614818999f5435d735bea42452d8c3d166b59f05494"} Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.133447 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.291081 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4xl6\" (UniqueName: \"kubernetes.io/projected/d77292a7-4976-429d-a389-525109ea00b2-kube-api-access-q4xl6\") pod \"d77292a7-4976-429d-a389-525109ea00b2\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.291217 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-util\") pod \"d77292a7-4976-429d-a389-525109ea00b2\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.291264 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-bundle\") pod \"d77292a7-4976-429d-a389-525109ea00b2\" (UID: \"d77292a7-4976-429d-a389-525109ea00b2\") " Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.291981 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-bundle" (OuterVolumeSpecName: "bundle") pod "d77292a7-4976-429d-a389-525109ea00b2" (UID: "d77292a7-4976-429d-a389-525109ea00b2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.300821 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d77292a7-4976-429d-a389-525109ea00b2-kube-api-access-q4xl6" (OuterVolumeSpecName: "kube-api-access-q4xl6") pod "d77292a7-4976-429d-a389-525109ea00b2" (UID: "d77292a7-4976-429d-a389-525109ea00b2"). InnerVolumeSpecName "kube-api-access-q4xl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.304670 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-util" (OuterVolumeSpecName: "util") pod "d77292a7-4976-429d-a389-525109ea00b2" (UID: "d77292a7-4976-429d-a389-525109ea00b2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.392337 4689 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-util\") on node \"crc\" DevicePath \"\"" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.392616 4689 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d77292a7-4976-429d-a389-525109ea00b2-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.392684 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4xl6\" (UniqueName: \"kubernetes.io/projected/d77292a7-4976-429d-a389-525109ea00b2-kube-api-access-q4xl6\") on node \"crc\" DevicePath \"\"" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.917943 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" event={"ID":"d77292a7-4976-429d-a389-525109ea00b2","Type":"ContainerDied","Data":"0bd261b891cd5519545078e9349ab79c2773b3765ec86fc7c19c192482ce5c5d"} Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.918018 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bd261b891cd5519545078e9349ab79c2773b3765ec86fc7c19c192482ce5c5d" Oct 13 21:21:42 crc kubenswrapper[4689]: I1013 21:21:42.918044 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.643609 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2"] Oct 13 21:21:45 crc kubenswrapper[4689]: E1013 21:21:45.644084 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d77292a7-4976-429d-a389-525109ea00b2" containerName="pull" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.644096 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d77292a7-4976-429d-a389-525109ea00b2" containerName="pull" Oct 13 21:21:45 crc kubenswrapper[4689]: E1013 21:21:45.644109 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d77292a7-4976-429d-a389-525109ea00b2" containerName="util" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.644115 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d77292a7-4976-429d-a389-525109ea00b2" containerName="util" Oct 13 21:21:45 crc kubenswrapper[4689]: E1013 21:21:45.644133 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d77292a7-4976-429d-a389-525109ea00b2" containerName="extract" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.644140 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d77292a7-4976-429d-a389-525109ea00b2" containerName="extract" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.644226 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d77292a7-4976-429d-a389-525109ea00b2" containerName="extract" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.644616 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.647524 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-blsft" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.647654 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.648181 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.670142 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2"] Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.839948 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vbd8\" (UniqueName: \"kubernetes.io/projected/189fae32-9490-4991-b5c0-2ba0de67d337-kube-api-access-8vbd8\") pod \"nmstate-operator-858ddd8f98-bmmp2\" (UID: \"189fae32-9490-4991-b5c0-2ba0de67d337\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.941157 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vbd8\" (UniqueName: \"kubernetes.io/projected/189fae32-9490-4991-b5c0-2ba0de67d337-kube-api-access-8vbd8\") pod \"nmstate-operator-858ddd8f98-bmmp2\" (UID: \"189fae32-9490-4991-b5c0-2ba0de67d337\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.963269 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vbd8\" (UniqueName: \"kubernetes.io/projected/189fae32-9490-4991-b5c0-2ba0de67d337-kube-api-access-8vbd8\") pod \"nmstate-operator-858ddd8f98-bmmp2\" (UID: \"189fae32-9490-4991-b5c0-2ba0de67d337\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" Oct 13 21:21:45 crc kubenswrapper[4689]: I1013 21:21:45.963692 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" Oct 13 21:21:46 crc kubenswrapper[4689]: I1013 21:21:46.133108 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2"] Oct 13 21:21:46 crc kubenswrapper[4689]: I1013 21:21:46.961386 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" event={"ID":"189fae32-9490-4991-b5c0-2ba0de67d337","Type":"ContainerStarted","Data":"bd0accb9058aede845ef05b68766c57d584bdfb23fc004cf576e2e146659e33e"} Oct 13 21:21:48 crc kubenswrapper[4689]: I1013 21:21:48.992959 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" event={"ID":"189fae32-9490-4991-b5c0-2ba0de67d337","Type":"ContainerStarted","Data":"6bf4a3047308a244e705bd89965d2994e76c8947a6dcffc374e764f460f389ec"} Oct 13 21:21:49 crc kubenswrapper[4689]: I1013 21:21:49.025139 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-bmmp2" podStartSLOduration=1.6181842629999998 podStartE2EDuration="4.025115633s" podCreationTimestamp="2025-10-13 21:21:45 +0000 UTC" firstStartedPulling="2025-10-13 21:21:46.179787453 +0000 UTC m=+623.098032538" lastFinishedPulling="2025-10-13 21:21:48.586718823 +0000 UTC m=+625.504963908" observedRunningTime="2025-10-13 21:21:49.018444772 +0000 UTC m=+625.936689877" watchObservedRunningTime="2025-10-13 21:21:49.025115633 +0000 UTC m=+625.943360718" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.039459 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.040516 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.041859 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-l2wxt" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.052291 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.056490 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.057549 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.061866 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.075318 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-9nfcm"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.076089 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.083044 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.097546 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-nmstate-lock\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.097614 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54x44\" (UniqueName: \"kubernetes.io/projected/a76e7989-6bed-472b-8a4f-53227f485adb-kube-api-access-54x44\") pod \"nmstate-metrics-fdff9cb8d-tgngb\" (UID: \"a76e7989-6bed-472b-8a4f-53227f485adb\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.097632 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-ovs-socket\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.097659 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-dbus-socket\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.097687 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2f1f259b-9d4c-469c-b336-0f7c4fdac5be-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v7tfb\" (UID: \"2f1f259b-9d4c-469c-b336-0f7c4fdac5be\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.097716 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p98x\" (UniqueName: \"kubernetes.io/projected/63209f73-d6db-4f5e-9863-37c7d8555f1d-kube-api-access-4p98x\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.097734 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqtt5\" (UniqueName: \"kubernetes.io/projected/2f1f259b-9d4c-469c-b336-0f7c4fdac5be-kube-api-access-pqtt5\") pod \"nmstate-webhook-6cdbc54649-v7tfb\" (UID: \"2f1f259b-9d4c-469c-b336-0f7c4fdac5be\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.165703 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.166355 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.169929 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.170541 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.171819 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-psxck" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.179538 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198568 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p98x\" (UniqueName: \"kubernetes.io/projected/63209f73-d6db-4f5e-9863-37c7d8555f1d-kube-api-access-4p98x\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198630 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqtt5\" (UniqueName: \"kubernetes.io/projected/2f1f259b-9d4c-469c-b336-0f7c4fdac5be-kube-api-access-pqtt5\") pod \"nmstate-webhook-6cdbc54649-v7tfb\" (UID: \"2f1f259b-9d4c-469c-b336-0f7c4fdac5be\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198656 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/15bec576-7113-4f6f-8f5a-ed95b3e01608-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198683 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/15bec576-7113-4f6f-8f5a-ed95b3e01608-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198712 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwgm7\" (UniqueName: \"kubernetes.io/projected/15bec576-7113-4f6f-8f5a-ed95b3e01608-kube-api-access-cwgm7\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198733 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-nmstate-lock\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198765 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54x44\" (UniqueName: 
\"kubernetes.io/projected/a76e7989-6bed-472b-8a4f-53227f485adb-kube-api-access-54x44\") pod \"nmstate-metrics-fdff9cb8d-tgngb\" (UID: \"a76e7989-6bed-472b-8a4f-53227f485adb\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198787 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-ovs-socket\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198820 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-dbus-socket\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.198836 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2f1f259b-9d4c-469c-b336-0f7c4fdac5be-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v7tfb\" (UID: \"2f1f259b-9d4c-469c-b336-0f7c4fdac5be\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.200017 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-nmstate-lock\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.200092 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-ovs-socket\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.200357 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/63209f73-d6db-4f5e-9863-37c7d8555f1d-dbus-socket\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.214917 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqtt5\" (UniqueName: \"kubernetes.io/projected/2f1f259b-9d4c-469c-b336-0f7c4fdac5be-kube-api-access-pqtt5\") pod \"nmstate-webhook-6cdbc54649-v7tfb\" (UID: \"2f1f259b-9d4c-469c-b336-0f7c4fdac5be\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.215304 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p98x\" (UniqueName: \"kubernetes.io/projected/63209f73-d6db-4f5e-9863-37c7d8555f1d-kube-api-access-4p98x\") pod \"nmstate-handler-9nfcm\" (UID: \"63209f73-d6db-4f5e-9863-37c7d8555f1d\") " pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.215402 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54x44\" (UniqueName: \"kubernetes.io/projected/a76e7989-6bed-472b-8a4f-53227f485adb-kube-api-access-54x44\") 
pod \"nmstate-metrics-fdff9cb8d-tgngb\" (UID: \"a76e7989-6bed-472b-8a4f-53227f485adb\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.222569 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/2f1f259b-9d4c-469c-b336-0f7c4fdac5be-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-v7tfb\" (UID: \"2f1f259b-9d4c-469c-b336-0f7c4fdac5be\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.299703 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwgm7\" (UniqueName: \"kubernetes.io/projected/15bec576-7113-4f6f-8f5a-ed95b3e01608-kube-api-access-cwgm7\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.299801 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/15bec576-7113-4f6f-8f5a-ed95b3e01608-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.299829 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/15bec576-7113-4f6f-8f5a-ed95b3e01608-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: E1013 21:21:50.300048 4689 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Oct 13 21:21:50 crc kubenswrapper[4689]: E1013 21:21:50.300105 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/15bec576-7113-4f6f-8f5a-ed95b3e01608-plugin-serving-cert podName:15bec576-7113-4f6f-8f5a-ed95b3e01608 nodeName:}" failed. No retries permitted until 2025-10-13 21:21:50.800085939 +0000 UTC m=+627.718331024 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/15bec576-7113-4f6f-8f5a-ed95b3e01608-plugin-serving-cert") pod "nmstate-console-plugin-6b874cbd85-47vrf" (UID: "15bec576-7113-4f6f-8f5a-ed95b3e01608") : secret "plugin-serving-cert" not found Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.301217 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/15bec576-7113-4f6f-8f5a-ed95b3e01608-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.315255 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwgm7\" (UniqueName: \"kubernetes.io/projected/15bec576-7113-4f6f-8f5a-ed95b3e01608-kube-api-access-cwgm7\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.347382 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6c958d4579-vdwrn"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.348244 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.354244 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.357922 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6c958d4579-vdwrn"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.374968 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.401227 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-oauth-config\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.401369 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-serving-cert\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.401420 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6scfs\" (UniqueName: \"kubernetes.io/projected/66b3342e-9a48-4e0f-8082-732fadba0fd5-kube-api-access-6scfs\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.401448 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-trusted-ca-bundle\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.401606 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-service-ca\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.401651 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-config\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.401685 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-oauth-serving-cert\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.403237 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:50 crc kubenswrapper[4689]: W1013 21:21:50.429779 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63209f73_d6db_4f5e_9863_37c7d8555f1d.slice/crio-b999a9f3670a4b2189b3b97cf836a11d7ff19b903a0f4a133add3f77c987db06 WatchSource:0}: Error finding container b999a9f3670a4b2189b3b97cf836a11d7ff19b903a0f4a133add3f77c987db06: Status 404 returned error can't find the container with id b999a9f3670a4b2189b3b97cf836a11d7ff19b903a0f4a133add3f77c987db06 Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.505189 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-service-ca\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.505510 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-config\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.505535 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-oauth-serving-cert\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.505567 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-oauth-config\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.505622 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-serving-cert\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.505652 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6scfs\" (UniqueName: \"kubernetes.io/projected/66b3342e-9a48-4e0f-8082-732fadba0fd5-kube-api-access-6scfs\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.505674 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-trusted-ca-bundle\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.506483 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-service-ca\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.507102 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-oauth-serving-cert\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.507190 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-trusted-ca-bundle\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.507216 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-config\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.511662 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-serving-cert\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.513262 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/66b3342e-9a48-4e0f-8082-732fadba0fd5-console-oauth-config\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.526793 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6scfs\" (UniqueName: \"kubernetes.io/projected/66b3342e-9a48-4e0f-8082-732fadba0fd5-kube-api-access-6scfs\") pod \"console-6c958d4579-vdwrn\" (UID: \"66b3342e-9a48-4e0f-8082-732fadba0fd5\") " pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.636620 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb"] Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.664688 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.755767 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb"] Oct 13 21:21:50 crc kubenswrapper[4689]: W1013 21:21:50.765523 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda76e7989_6bed_472b_8a4f_53227f485adb.slice/crio-3bd5397cbb383a3861f974e2b60c885116a54399ef57e020f48c4cd21cdf83f5 WatchSource:0}: Error finding container 3bd5397cbb383a3861f974e2b60c885116a54399ef57e020f48c4cd21cdf83f5: Status 404 returned error can't find the container with id 3bd5397cbb383a3861f974e2b60c885116a54399ef57e020f48c4cd21cdf83f5 Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.809754 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/15bec576-7113-4f6f-8f5a-ed95b3e01608-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.812649 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/15bec576-7113-4f6f-8f5a-ed95b3e01608-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-47vrf\" (UID: \"15bec576-7113-4f6f-8f5a-ed95b3e01608\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:50 crc kubenswrapper[4689]: I1013 21:21:50.833937 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6c958d4579-vdwrn"] Oct 13 21:21:50 crc kubenswrapper[4689]: W1013 21:21:50.839448 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66b3342e_9a48_4e0f_8082_732fadba0fd5.slice/crio-cee0ba2e4a9c0c20f393aebf937a1d2d5ad546b68fa40ba9eb0f22e93b409952 WatchSource:0}: Error finding container cee0ba2e4a9c0c20f393aebf937a1d2d5ad546b68fa40ba9eb0f22e93b409952: Status 404 returned error can't find the container with id cee0ba2e4a9c0c20f393aebf937a1d2d5ad546b68fa40ba9eb0f22e93b409952 Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.002657 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6c958d4579-vdwrn" event={"ID":"66b3342e-9a48-4e0f-8082-732fadba0fd5","Type":"ContainerStarted","Data":"b6a6d71ba54769658f72eb0f8cfc4ed79e803a1ccd45801ccea27a43ba2ba8c1"} Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.003172 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6c958d4579-vdwrn" event={"ID":"66b3342e-9a48-4e0f-8082-732fadba0fd5","Type":"ContainerStarted","Data":"cee0ba2e4a9c0c20f393aebf937a1d2d5ad546b68fa40ba9eb0f22e93b409952"} Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.004662 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" event={"ID":"a76e7989-6bed-472b-8a4f-53227f485adb","Type":"ContainerStarted","Data":"3bd5397cbb383a3861f974e2b60c885116a54399ef57e020f48c4cd21cdf83f5"} Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.005872 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-9nfcm" 
event={"ID":"63209f73-d6db-4f5e-9863-37c7d8555f1d","Type":"ContainerStarted","Data":"b999a9f3670a4b2189b3b97cf836a11d7ff19b903a0f4a133add3f77c987db06"} Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.007021 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" event={"ID":"2f1f259b-9d4c-469c-b336-0f7c4fdac5be","Type":"ContainerStarted","Data":"4613b7d3d89f539562c7b79f90bbed0d1f4979324fca960bc7998793d5312319"} Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.023568 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6c958d4579-vdwrn" podStartSLOduration=1.023553242 podStartE2EDuration="1.023553242s" podCreationTimestamp="2025-10-13 21:21:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:21:51.021364872 +0000 UTC m=+627.939609957" watchObservedRunningTime="2025-10-13 21:21:51.023553242 +0000 UTC m=+627.941798327" Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.080793 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" Oct 13 21:21:51 crc kubenswrapper[4689]: I1013 21:21:51.253848 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf"] Oct 13 21:21:51 crc kubenswrapper[4689]: W1013 21:21:51.255733 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15bec576_7113_4f6f_8f5a_ed95b3e01608.slice/crio-95746c2d27f9bb446ceab0d0398a23ca32eeb4e01299de68c3de6b767e332d9f WatchSource:0}: Error finding container 95746c2d27f9bb446ceab0d0398a23ca32eeb4e01299de68c3de6b767e332d9f: Status 404 returned error can't find the container with id 95746c2d27f9bb446ceab0d0398a23ca32eeb4e01299de68c3de6b767e332d9f Oct 13 21:21:52 crc kubenswrapper[4689]: I1013 21:21:52.014221 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" event={"ID":"15bec576-7113-4f6f-8f5a-ed95b3e01608","Type":"ContainerStarted","Data":"95746c2d27f9bb446ceab0d0398a23ca32eeb4e01299de68c3de6b767e332d9f"} Oct 13 21:21:53 crc kubenswrapper[4689]: I1013 21:21:53.861749 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:21:53 crc kubenswrapper[4689]: I1013 21:21:53.862255 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:21:53 crc kubenswrapper[4689]: I1013 21:21:53.862299 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:21:53 crc kubenswrapper[4689]: I1013 21:21:53.862972 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b8c850c25ce9ad448a6035ec03d9103c3aaadb9f60c108dca0caaf9e3d4833c4"} 
pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:21:53 crc kubenswrapper[4689]: I1013 21:21:53.863022 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://b8c850c25ce9ad448a6035ec03d9103c3aaadb9f60c108dca0caaf9e3d4833c4" gracePeriod=600 Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.033080 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" event={"ID":"2f1f259b-9d4c-469c-b336-0f7c4fdac5be","Type":"ContainerStarted","Data":"3228d8b192ac08a0b333ac374a9afe5796ab89354b97d1c9dc8740d99e161286"} Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.033513 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.037532 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-9nfcm" event={"ID":"63209f73-d6db-4f5e-9863-37c7d8555f1d","Type":"ContainerStarted","Data":"4c7fbd6a51cf525876753bfb5e4da3940dd82007adde3be8d66bbcb24af5b0c6"} Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.038208 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.041633 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="b8c850c25ce9ad448a6035ec03d9103c3aaadb9f60c108dca0caaf9e3d4833c4" exitCode=0 Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.041691 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"b8c850c25ce9ad448a6035ec03d9103c3aaadb9f60c108dca0caaf9e3d4833c4"} Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.041729 4689 scope.go:117] "RemoveContainer" containerID="a2f64fccdbff6f9fbf34a77ede4edac00241e08c4bc51864952122589c103b1a" Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.045661 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" event={"ID":"a76e7989-6bed-472b-8a4f-53227f485adb","Type":"ContainerStarted","Data":"e35f3bbe875ba3b3a3dbc624b8ad687e8fa09426ece07d9e8fa097892eab697c"} Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.055301 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" podStartSLOduration=1.570759939 podStartE2EDuration="4.055279648s" podCreationTimestamp="2025-10-13 21:21:50 +0000 UTC" firstStartedPulling="2025-10-13 21:21:50.642576225 +0000 UTC m=+627.560821310" lastFinishedPulling="2025-10-13 21:21:53.127095934 +0000 UTC m=+630.045341019" observedRunningTime="2025-10-13 21:21:54.04787186 +0000 UTC m=+630.966116945" watchObservedRunningTime="2025-10-13 21:21:54.055279648 +0000 UTC m=+630.973524733" Oct 13 21:21:54 crc kubenswrapper[4689]: I1013 21:21:54.069044 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-9nfcm" podStartSLOduration=1.429104696 podStartE2EDuration="4.069025609s" 
podCreationTimestamp="2025-10-13 21:21:50 +0000 UTC" firstStartedPulling="2025-10-13 21:21:50.431624622 +0000 UTC m=+627.349869707" lastFinishedPulling="2025-10-13 21:21:53.071545535 +0000 UTC m=+629.989790620" observedRunningTime="2025-10-13 21:21:54.067851363 +0000 UTC m=+630.986096448" watchObservedRunningTime="2025-10-13 21:21:54.069025609 +0000 UTC m=+630.987270694" Oct 13 21:21:55 crc kubenswrapper[4689]: I1013 21:21:55.055296 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"d6e09cc8455c50704247801d05cbebdbf7631acaa5e20cfd3bbcab24f523d8ed"} Oct 13 21:21:55 crc kubenswrapper[4689]: I1013 21:21:55.060707 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" event={"ID":"15bec576-7113-4f6f-8f5a-ed95b3e01608","Type":"ContainerStarted","Data":"9036b0ecd9926fe8be986850fbffa7ccae1f8ce5102a124fa800aa44a2bf568a"} Oct 13 21:21:55 crc kubenswrapper[4689]: I1013 21:21:55.096271 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-47vrf" podStartSLOduration=2.384937377 podStartE2EDuration="5.096232629s" podCreationTimestamp="2025-10-13 21:21:50 +0000 UTC" firstStartedPulling="2025-10-13 21:21:51.258049208 +0000 UTC m=+628.176294293" lastFinishedPulling="2025-10-13 21:21:53.96934446 +0000 UTC m=+630.887589545" observedRunningTime="2025-10-13 21:21:55.084313888 +0000 UTC m=+632.002558973" watchObservedRunningTime="2025-10-13 21:21:55.096232629 +0000 UTC m=+632.014477714" Oct 13 21:21:56 crc kubenswrapper[4689]: I1013 21:21:56.068641 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" event={"ID":"a76e7989-6bed-472b-8a4f-53227f485adb","Type":"ContainerStarted","Data":"c2ff7defc6fed02f29a325dc3c63c3d223c95ce88c89c4a64e1b22e9e03429dc"} Oct 13 21:21:56 crc kubenswrapper[4689]: I1013 21:21:56.100007 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-tgngb" podStartSLOduration=1.209589 podStartE2EDuration="6.099977996s" podCreationTimestamp="2025-10-13 21:21:50 +0000 UTC" firstStartedPulling="2025-10-13 21:21:50.767922236 +0000 UTC m=+627.686167321" lastFinishedPulling="2025-10-13 21:21:55.658311232 +0000 UTC m=+632.576556317" observedRunningTime="2025-10-13 21:21:56.099065525 +0000 UTC m=+633.017310650" watchObservedRunningTime="2025-10-13 21:21:56.099977996 +0000 UTC m=+633.018223091" Oct 13 21:22:00 crc kubenswrapper[4689]: I1013 21:22:00.427096 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-9nfcm" Oct 13 21:22:00 crc kubenswrapper[4689]: I1013 21:22:00.665787 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:22:00 crc kubenswrapper[4689]: I1013 21:22:00.665970 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:22:00 crc kubenswrapper[4689]: I1013 21:22:00.673203 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:22:01 crc kubenswrapper[4689]: I1013 21:22:01.110773 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/console-6c958d4579-vdwrn" Oct 13 21:22:01 crc kubenswrapper[4689]: I1013 21:22:01.203180 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z5vx4"] Oct 13 21:22:10 crc kubenswrapper[4689]: I1013 21:22:10.381292 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-v7tfb" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.675026 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94"] Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.677927 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.679854 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.684710 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94"] Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.793965 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.794018 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.794077 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82cdd\" (UniqueName: \"kubernetes.io/projected/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-kube-api-access-82cdd\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.895314 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.895369 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " 
pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.895461 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82cdd\" (UniqueName: \"kubernetes.io/projected/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-kube-api-access-82cdd\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.896077 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.896305 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.919019 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82cdd\" (UniqueName: \"kubernetes.io/projected/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-kube-api-access-82cdd\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:24 crc kubenswrapper[4689]: I1013 21:22:24.995165 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:25 crc kubenswrapper[4689]: I1013 21:22:25.205893 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94"] Oct 13 21:22:25 crc kubenswrapper[4689]: I1013 21:22:25.249014 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" event={"ID":"8adf85c6-59ab-4e3f-8830-9d7509cb34b4","Type":"ContainerStarted","Data":"79581b48c83903e87fc618eea400f18732763ccdfb11ec69ecb2c516d0f13113"} Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.259109 4689 generic.go:334] "Generic (PLEG): container finished" podID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerID="0234f7f677a6b771d5f3621b785e2942082ee672a69524dcca3917118fe69237" exitCode=0 Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.259201 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" event={"ID":"8adf85c6-59ab-4e3f-8830-9d7509cb34b4","Type":"ContainerDied","Data":"0234f7f677a6b771d5f3621b785e2942082ee672a69524dcca3917118fe69237"} Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.267559 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-z5vx4" podUID="0d4c9845-75c1-43df-b20c-2e90d4830d84" containerName="console" containerID="cri-o://cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4" gracePeriod=15 Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.621052 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z5vx4_0d4c9845-75c1-43df-b20c-2e90d4830d84/console/0.log" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.621362 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.720292 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-oauth-serving-cert\") pod \"0d4c9845-75c1-43df-b20c-2e90d4830d84\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.720357 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-oauth-config\") pod \"0d4c9845-75c1-43df-b20c-2e90d4830d84\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.720409 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-service-ca\") pod \"0d4c9845-75c1-43df-b20c-2e90d4830d84\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.720438 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-config\") pod \"0d4c9845-75c1-43df-b20c-2e90d4830d84\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.720469 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-serving-cert\") pod \"0d4c9845-75c1-43df-b20c-2e90d4830d84\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.720514 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df9q8\" (UniqueName: \"kubernetes.io/projected/0d4c9845-75c1-43df-b20c-2e90d4830d84-kube-api-access-df9q8\") pod \"0d4c9845-75c1-43df-b20c-2e90d4830d84\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.720556 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-trusted-ca-bundle\") pod \"0d4c9845-75c1-43df-b20c-2e90d4830d84\" (UID: \"0d4c9845-75c1-43df-b20c-2e90d4830d84\") " Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.721408 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-config" (OuterVolumeSpecName: "console-config") pod "0d4c9845-75c1-43df-b20c-2e90d4830d84" (UID: "0d4c9845-75c1-43df-b20c-2e90d4830d84"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.721481 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-service-ca" (OuterVolumeSpecName: "service-ca") pod "0d4c9845-75c1-43df-b20c-2e90d4830d84" (UID: "0d4c9845-75c1-43df-b20c-2e90d4830d84"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.721495 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "0d4c9845-75c1-43df-b20c-2e90d4830d84" (UID: "0d4c9845-75c1-43df-b20c-2e90d4830d84"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.721555 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "0d4c9845-75c1-43df-b20c-2e90d4830d84" (UID: "0d4c9845-75c1-43df-b20c-2e90d4830d84"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.726085 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "0d4c9845-75c1-43df-b20c-2e90d4830d84" (UID: "0d4c9845-75c1-43df-b20c-2e90d4830d84"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.726114 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d4c9845-75c1-43df-b20c-2e90d4830d84-kube-api-access-df9q8" (OuterVolumeSpecName: "kube-api-access-df9q8") pod "0d4c9845-75c1-43df-b20c-2e90d4830d84" (UID: "0d4c9845-75c1-43df-b20c-2e90d4830d84"). InnerVolumeSpecName "kube-api-access-df9q8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.726414 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "0d4c9845-75c1-43df-b20c-2e90d4830d84" (UID: "0d4c9845-75c1-43df-b20c-2e90d4830d84"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.822574 4689 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.822668 4689 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.822692 4689 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-service-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.822709 4689 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.822726 4689 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0d4c9845-75c1-43df-b20c-2e90d4830d84-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.822745 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df9q8\" (UniqueName: \"kubernetes.io/projected/0d4c9845-75c1-43df-b20c-2e90d4830d84-kube-api-access-df9q8\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:26 crc kubenswrapper[4689]: I1013 21:22:26.822764 4689 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0d4c9845-75c1-43df-b20c-2e90d4830d84-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.270171 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-z5vx4_0d4c9845-75c1-43df-b20c-2e90d4830d84/console/0.log" Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.271273 4689 generic.go:334] "Generic (PLEG): container finished" podID="0d4c9845-75c1-43df-b20c-2e90d4830d84" containerID="cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4" exitCode=2 Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.271354 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z5vx4" event={"ID":"0d4c9845-75c1-43df-b20c-2e90d4830d84","Type":"ContainerDied","Data":"cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4"} Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.271423 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z5vx4" event={"ID":"0d4c9845-75c1-43df-b20c-2e90d4830d84","Type":"ContainerDied","Data":"3378aad154596b1a7016394dacd13113ce904ff52e6d04996f955121cedbcf48"} Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.271374 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-z5vx4" Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.271455 4689 scope.go:117] "RemoveContainer" containerID="cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4" Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.296179 4689 scope.go:117] "RemoveContainer" containerID="cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4" Oct 13 21:22:27 crc kubenswrapper[4689]: E1013 21:22:27.296874 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4\": container with ID starting with cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4 not found: ID does not exist" containerID="cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4" Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.296921 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4"} err="failed to get container status \"cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4\": rpc error: code = NotFound desc = could not find container \"cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4\": container with ID starting with cf84c96efb39c26b9c33d933b0eab7627358d4514a2afe157ff8820f7fbcf6d4 not found: ID does not exist" Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.311701 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-z5vx4"] Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.319995 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-z5vx4"] Oct 13 21:22:27 crc kubenswrapper[4689]: I1013 21:22:27.874199 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d4c9845-75c1-43df-b20c-2e90d4830d84" path="/var/lib/kubelet/pods/0d4c9845-75c1-43df-b20c-2e90d4830d84/volumes" Oct 13 21:22:28 crc kubenswrapper[4689]: I1013 21:22:28.279866 4689 generic.go:334] "Generic (PLEG): container finished" podID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerID="c444fa214c8336f2539eb06bc9ffd4e42329e8c319310c5556d24fd970fdf4d2" exitCode=0 Oct 13 21:22:28 crc kubenswrapper[4689]: I1013 21:22:28.279965 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" event={"ID":"8adf85c6-59ab-4e3f-8830-9d7509cb34b4","Type":"ContainerDied","Data":"c444fa214c8336f2539eb06bc9ffd4e42329e8c319310c5556d24fd970fdf4d2"} Oct 13 21:22:29 crc kubenswrapper[4689]: I1013 21:22:29.288242 4689 generic.go:334] "Generic (PLEG): container finished" podID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerID="7398b450dcb0992ef0280b0bcace27860c8f9603924d2ce3b1b8c80e5ce54d46" exitCode=0 Oct 13 21:22:29 crc kubenswrapper[4689]: I1013 21:22:29.288286 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" event={"ID":"8adf85c6-59ab-4e3f-8830-9d7509cb34b4","Type":"ContainerDied","Data":"7398b450dcb0992ef0280b0bcace27860c8f9603924d2ce3b1b8c80e5ce54d46"} Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.565078 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.671011 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82cdd\" (UniqueName: \"kubernetes.io/projected/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-kube-api-access-82cdd\") pod \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.671201 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-bundle\") pod \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.671237 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-util\") pod \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\" (UID: \"8adf85c6-59ab-4e3f-8830-9d7509cb34b4\") " Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.672785 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-bundle" (OuterVolumeSpecName: "bundle") pod "8adf85c6-59ab-4e3f-8830-9d7509cb34b4" (UID: "8adf85c6-59ab-4e3f-8830-9d7509cb34b4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.677774 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-kube-api-access-82cdd" (OuterVolumeSpecName: "kube-api-access-82cdd") pod "8adf85c6-59ab-4e3f-8830-9d7509cb34b4" (UID: "8adf85c6-59ab-4e3f-8830-9d7509cb34b4"). InnerVolumeSpecName "kube-api-access-82cdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.772086 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82cdd\" (UniqueName: \"kubernetes.io/projected/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-kube-api-access-82cdd\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.772117 4689 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.884323 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-util" (OuterVolumeSpecName: "util") pod "8adf85c6-59ab-4e3f-8830-9d7509cb34b4" (UID: "8adf85c6-59ab-4e3f-8830-9d7509cb34b4"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:22:30 crc kubenswrapper[4689]: I1013 21:22:30.974803 4689 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8adf85c6-59ab-4e3f-8830-9d7509cb34b4-util\") on node \"crc\" DevicePath \"\"" Oct 13 21:22:31 crc kubenswrapper[4689]: I1013 21:22:31.304845 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" event={"ID":"8adf85c6-59ab-4e3f-8830-9d7509cb34b4","Type":"ContainerDied","Data":"79581b48c83903e87fc618eea400f18732763ccdfb11ec69ecb2c516d0f13113"} Oct 13 21:22:31 crc kubenswrapper[4689]: I1013 21:22:31.304889 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79581b48c83903e87fc618eea400f18732763ccdfb11ec69ecb2c516d0f13113" Oct 13 21:22:31 crc kubenswrapper[4689]: I1013 21:22:31.304907 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.993382 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw"] Oct 13 21:22:39 crc kubenswrapper[4689]: E1013 21:22:39.994205 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerName="extract" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.994224 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerName="extract" Oct 13 21:22:39 crc kubenswrapper[4689]: E1013 21:22:39.994242 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerName="util" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.994250 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerName="util" Oct 13 21:22:39 crc kubenswrapper[4689]: E1013 21:22:39.994262 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerName="pull" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.994269 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerName="pull" Oct 13 21:22:39 crc kubenswrapper[4689]: E1013 21:22:39.994282 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d4c9845-75c1-43df-b20c-2e90d4830d84" containerName="console" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.994290 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d4c9845-75c1-43df-b20c-2e90d4830d84" containerName="console" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.994421 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d4c9845-75c1-43df-b20c-2e90d4830d84" containerName="console" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.994439 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8adf85c6-59ab-4e3f-8830-9d7509cb34b4" containerName="extract" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.994936 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.996449 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.997159 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.997197 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.997249 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 13 21:22:39 crc kubenswrapper[4689]: I1013 21:22:39.998240 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-t4krx" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.078161 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw"] Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.094836 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-apiservice-cert\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.094889 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-webhook-cert\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.094934 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cxvs\" (UniqueName: \"kubernetes.io/projected/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-kube-api-access-8cxvs\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.195695 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-apiservice-cert\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.196611 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-webhook-cert\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.196654 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cxvs\" (UniqueName: \"kubernetes.io/projected/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-kube-api-access-8cxvs\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.202881 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-webhook-cert\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.217264 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-apiservice-cert\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.217559 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn"] Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.218299 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.221002 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cxvs\" (UniqueName: \"kubernetes.io/projected/61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1-kube-api-access-8cxvs\") pod \"metallb-operator-controller-manager-5dd59d54d9-fw8tw\" (UID: \"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1\") " pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.221622 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.221881 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-pv76b" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.221992 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.233373 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn"] Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.298194 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-webhook-cert\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.298279 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99zlp\" (UniqueName: 
\"kubernetes.io/projected/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-kube-api-access-99zlp\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.298342 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-apiservice-cert\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.317034 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.400159 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99zlp\" (UniqueName: \"kubernetes.io/projected/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-kube-api-access-99zlp\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.400222 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-apiservice-cert\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.400298 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-webhook-cert\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.403635 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-webhook-cert\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.403969 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-apiservice-cert\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.431429 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99zlp\" (UniqueName: \"kubernetes.io/projected/58b9bc04-cebe-4c96-9fdc-14fd4a71f45e-kube-api-access-99zlp\") pod \"metallb-operator-webhook-server-5776bf7669-g6lbn\" (UID: \"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e\") " pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc 
kubenswrapper[4689]: I1013 21:22:40.579409 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:22:40 crc kubenswrapper[4689]: I1013 21:22:40.745210 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw"] Oct 13 21:22:41 crc kubenswrapper[4689]: I1013 21:22:41.079348 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn"] Oct 13 21:22:41 crc kubenswrapper[4689]: I1013 21:22:41.360234 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" event={"ID":"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1","Type":"ContainerStarted","Data":"35d369c6f962dccf34d6b37997f0f950a43e8d360d2948d2465b758d593ecad7"} Oct 13 21:22:41 crc kubenswrapper[4689]: I1013 21:22:41.361262 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" event={"ID":"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e","Type":"ContainerStarted","Data":"5b7372c8975bfc033d0abe80445d28147f6672af1caebd1ffef83935dcd686ab"} Oct 13 21:22:44 crc kubenswrapper[4689]: I1013 21:22:44.400719 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" event={"ID":"61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1","Type":"ContainerStarted","Data":"263d01b6fc6e38532c4eb7f8adc832766a7b58cd44168737f6c1adbcfa90196c"} Oct 13 21:22:44 crc kubenswrapper[4689]: I1013 21:22:44.401376 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:22:44 crc kubenswrapper[4689]: I1013 21:22:44.425275 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" podStartSLOduration=2.046026264 podStartE2EDuration="5.425252652s" podCreationTimestamp="2025-10-13 21:22:39 +0000 UTC" firstStartedPulling="2025-10-13 21:22:40.769841647 +0000 UTC m=+677.688086742" lastFinishedPulling="2025-10-13 21:22:44.149068045 +0000 UTC m=+681.067313130" observedRunningTime="2025-10-13 21:22:44.421896834 +0000 UTC m=+681.340141929" watchObservedRunningTime="2025-10-13 21:22:44.425252652 +0000 UTC m=+681.343497747" Oct 13 21:22:46 crc kubenswrapper[4689]: I1013 21:22:46.414425 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" event={"ID":"58b9bc04-cebe-4c96-9fdc-14fd4a71f45e","Type":"ContainerStarted","Data":"d308c8197485e5ec5e740bf48dfbb301e68380cdb0e2b876f4fa73b4b821e05e"} Oct 13 21:22:46 crc kubenswrapper[4689]: I1013 21:22:46.443559 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" podStartSLOduration=1.499369884 podStartE2EDuration="6.443536181s" podCreationTimestamp="2025-10-13 21:22:40 +0000 UTC" firstStartedPulling="2025-10-13 21:22:41.091183596 +0000 UTC m=+678.009428681" lastFinishedPulling="2025-10-13 21:22:46.035349893 +0000 UTC m=+682.953594978" observedRunningTime="2025-10-13 21:22:46.439782133 +0000 UTC m=+683.358027248" watchObservedRunningTime="2025-10-13 21:22:46.443536181 +0000 UTC m=+683.361781266" Oct 13 21:22:47 crc kubenswrapper[4689]: I1013 21:22:47.420325 4689 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:23:00 crc kubenswrapper[4689]: I1013 21:23:00.584866 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5776bf7669-g6lbn" Oct 13 21:23:20 crc kubenswrapper[4689]: I1013 21:23:20.322398 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5dd59d54d9-fw8tw" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.111804 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92"] Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.112938 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.114597 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.114765 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-fq566" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.115366 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-7d7t6"] Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.118259 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.119608 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.120049 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.132971 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92"] Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.170546 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/13456adb-0ae6-4db3-a924-dabf915a24aa-cert\") pod \"frr-k8s-webhook-server-64bf5d555-bqf92\" (UID: \"13456adb-0ae6-4db3-a924-dabf915a24aa\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.170932 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-reloader\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.170956 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-startup\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.171017 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl27s\" (UniqueName: \"kubernetes.io/projected/13456adb-0ae6-4db3-a924-dabf915a24aa-kube-api-access-sl27s\") pod 
\"frr-k8s-webhook-server-64bf5d555-bqf92\" (UID: \"13456adb-0ae6-4db3-a924-dabf915a24aa\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.171048 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics-certs\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.171068 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5pjp\" (UniqueName: \"kubernetes.io/projected/bb5b52f5-2cd3-4945-8242-96deb1549036-kube-api-access-j5pjp\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.171094 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-sockets\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.171152 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.171167 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-conf\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.180605 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-z2p9k"] Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.186670 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.193036 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.193354 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.193448 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.193567 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-vdhk2" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.210872 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-fzzmz"] Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.211868 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.217742 4689 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.220902 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-fzzmz"] Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.272748 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r787q\" (UniqueName: \"kubernetes.io/projected/692201d0-1473-499e-b9e6-2d35e6c72032-kube-api-access-r787q\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.272815 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/13456adb-0ae6-4db3-a924-dabf915a24aa-cert\") pod \"frr-k8s-webhook-server-64bf5d555-bqf92\" (UID: \"13456adb-0ae6-4db3-a924-dabf915a24aa\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.272849 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-reloader\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.272895 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-metrics-certs\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.272914 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-startup\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.272952 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/77872557-bf06-47e5-b7cb-0101ddd79f56-metallb-excludel2\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.272994 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl27s\" (UniqueName: \"kubernetes.io/projected/13456adb-0ae6-4db3-a924-dabf915a24aa-kube-api-access-sl27s\") pod \"frr-k8s-webhook-server-64bf5d555-bqf92\" (UID: \"13456adb-0ae6-4db3-a924-dabf915a24aa\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273040 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics-certs\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc 
kubenswrapper[4689]: I1013 21:23:21.273067 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5pjp\" (UniqueName: \"kubernetes.io/projected/bb5b52f5-2cd3-4945-8242-96deb1549036-kube-api-access-j5pjp\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273086 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw54h\" (UniqueName: \"kubernetes.io/projected/77872557-bf06-47e5-b7cb-0101ddd79f56-kube-api-access-qw54h\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273154 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-sockets\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273230 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-cert\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273322 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273399 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-metrics-certs\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273433 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.273489 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-conf\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.274308 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-conf\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.275348 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-sockets\") pod 
\"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.275471 4689 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.275538 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics-certs podName:bb5b52f5-2cd3-4945-8242-96deb1549036 nodeName:}" failed. No retries permitted until 2025-10-13 21:23:21.775518966 +0000 UTC m=+718.693764131 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics-certs") pod "frr-k8s-7d7t6" (UID: "bb5b52f5-2cd3-4945-8242-96deb1549036") : secret "frr-k8s-certs-secret" not found Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.275540 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-reloader\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.275920 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.277753 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/bb5b52f5-2cd3-4945-8242-96deb1549036-frr-startup\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.284279 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/13456adb-0ae6-4db3-a924-dabf915a24aa-cert\") pod \"frr-k8s-webhook-server-64bf5d555-bqf92\" (UID: \"13456adb-0ae6-4db3-a924-dabf915a24aa\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.290343 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5pjp\" (UniqueName: \"kubernetes.io/projected/bb5b52f5-2cd3-4945-8242-96deb1549036-kube-api-access-j5pjp\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.292174 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl27s\" (UniqueName: \"kubernetes.io/projected/13456adb-0ae6-4db3-a924-dabf915a24aa-kube-api-access-sl27s\") pod \"frr-k8s-webhook-server-64bf5d555-bqf92\" (UID: \"13456adb-0ae6-4db3-a924-dabf915a24aa\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.375193 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-cert\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 
21:23:21.375271 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.375309 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-metrics-certs\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.375343 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r787q\" (UniqueName: \"kubernetes.io/projected/692201d0-1473-499e-b9e6-2d35e6c72032-kube-api-access-r787q\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.375370 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-metrics-certs\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.375390 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/77872557-bf06-47e5-b7cb-0101ddd79f56-metallb-excludel2\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.375406 4689 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.375444 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw54h\" (UniqueName: \"kubernetes.io/projected/77872557-bf06-47e5-b7cb-0101ddd79f56-kube-api-access-qw54h\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.375473 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist podName:77872557-bf06-47e5-b7cb-0101ddd79f56 nodeName:}" failed. No retries permitted until 2025-10-13 21:23:21.875451074 +0000 UTC m=+718.793696159 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist") pod "speaker-z2p9k" (UID: "77872557-bf06-47e5-b7cb-0101ddd79f56") : secret "metallb-memberlist" not found Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.375820 4689 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.375869 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-metrics-certs podName:692201d0-1473-499e-b9e6-2d35e6c72032 nodeName:}" failed. 
No retries permitted until 2025-10-13 21:23:21.875851244 +0000 UTC m=+718.794096319 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-metrics-certs") pod "controller-68d546b9d8-fzzmz" (UID: "692201d0-1473-499e-b9e6-2d35e6c72032") : secret "controller-certs-secret" not found Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.376659 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/77872557-bf06-47e5-b7cb-0101ddd79f56-metallb-excludel2\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.380030 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-cert\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.385039 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-metrics-certs\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.398295 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r787q\" (UniqueName: \"kubernetes.io/projected/692201d0-1473-499e-b9e6-2d35e6c72032-kube-api-access-r787q\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.401359 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw54h\" (UniqueName: \"kubernetes.io/projected/77872557-bf06-47e5-b7cb-0101ddd79f56-kube-api-access-qw54h\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.434088 4689 util.go:30] "No sandbox for pod can be found. 
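[Note] The MountVolume.SetUp failures above are ordering noise, not real faults: the metallb pods were scheduled before the "frr-k8s-certs-secret", "metallb-memberlist", and "controller-certs-secret" Secrets existed, so the kubelet schedules retries with a doubling backoff (durationBeforeRetry 500ms here, 1s on the next failure below, after which the mounts succeed). A minimal Go sketch of that doubling schedule, assuming a 2x multiplier as observed in this log and an illustrative cap (not kubelet's actual implementation):

package main

import (
	"fmt"
	"time"
)

// nextBackoff mirrors the doubling retry delay visible in the
// nestedpendingoperations.go errors above: the first retry for a failed
// secret mount is scheduled 500ms out, the next 1s out. The 2-minute
// ceiling is an assumed illustration, not taken from this log.
func nextBackoff(d time.Duration) time.Duration {
	const (
		initial  = 500 * time.Millisecond
		maxDelay = 2 * time.Minute
	)
	if d < initial {
		return initial
	}
	if d *= 2; d > maxDelay {
		d = maxDelay
	}
	return d
}

func main() {
	var d time.Duration
	for i := 0; i < 4; i++ {
		d = nextBackoff(d)
		fmt.Println(d) // 500ms, 1s, 2s, 4s
	}
}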
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.621108 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92"] Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.780289 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics-certs\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.789240 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bb5b52f5-2cd3-4945-8242-96deb1549036-metrics-certs\") pod \"frr-k8s-7d7t6\" (UID: \"bb5b52f5-2cd3-4945-8242-96deb1549036\") " pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.881198 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.881327 4689 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 13 21:23:21 crc kubenswrapper[4689]: E1013 21:23:21.881455 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist podName:77872557-bf06-47e5-b7cb-0101ddd79f56 nodeName:}" failed. No retries permitted until 2025-10-13 21:23:22.881439579 +0000 UTC m=+719.799684664 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist") pod "speaker-z2p9k" (UID: "77872557-bf06-47e5-b7cb-0101ddd79f56") : secret "metallb-memberlist" not found Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.881389 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-metrics-certs\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:21 crc kubenswrapper[4689]: I1013 21:23:21.884830 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/692201d0-1473-499e-b9e6-2d35e6c72032-metrics-certs\") pod \"controller-68d546b9d8-fzzmz\" (UID: \"692201d0-1473-499e-b9e6-2d35e6c72032\") " pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.044380 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.127667 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.350532 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-fzzmz"] Oct 13 21:23:22 crc kubenswrapper[4689]: W1013 21:23:22.356399 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod692201d0_1473_499e_b9e6_2d35e6c72032.slice/crio-c8ddd3ba65a158ea53c7ba569d9f769f2bbade4a0021d3ea1352800ddedbef93 WatchSource:0}: Error finding container c8ddd3ba65a158ea53c7ba569d9f769f2bbade4a0021d3ea1352800ddedbef93: Status 404 returned error can't find the container with id c8ddd3ba65a158ea53c7ba569d9f769f2bbade4a0021d3ea1352800ddedbef93 Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.622275 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerStarted","Data":"ccf0dd56a348179eef7fd0d121a34f214af0dbade892f4ac1d28a32dd9fe2576"} Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.624229 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-fzzmz" event={"ID":"692201d0-1473-499e-b9e6-2d35e6c72032","Type":"ContainerStarted","Data":"138802c92db419986b419fbe27e803dcd45d3822416457fa1d3b02f7cdd08a5d"} Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.624253 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-fzzmz" event={"ID":"692201d0-1473-499e-b9e6-2d35e6c72032","Type":"ContainerStarted","Data":"c8ddd3ba65a158ea53c7ba569d9f769f2bbade4a0021d3ea1352800ddedbef93"} Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.624707 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.627491 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" event={"ID":"13456adb-0ae6-4db3-a924-dabf915a24aa","Type":"ContainerStarted","Data":"491692cc20d7cc4d65018fa7a00f92e14b4bb0c90de0edbd14d5bfc1e0ad7732"} Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.643020 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-fzzmz" podStartSLOduration=1.642999356 podStartE2EDuration="1.642999356s" podCreationTimestamp="2025-10-13 21:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:23:22.638581582 +0000 UTC m=+719.556826677" watchObservedRunningTime="2025-10-13 21:23:22.642999356 +0000 UTC m=+719.561244441" Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.896153 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:22 crc kubenswrapper[4689]: I1013 21:23:22.902025 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/77872557-bf06-47e5-b7cb-0101ddd79f56-memberlist\") pod \"speaker-z2p9k\" (UID: \"77872557-bf06-47e5-b7cb-0101ddd79f56\") " pod="metallb-system/speaker-z2p9k" Oct 13 21:23:23 crc kubenswrapper[4689]: I1013 
21:23:23.008367 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-z2p9k" Oct 13 21:23:23 crc kubenswrapper[4689]: W1013 21:23:23.028782 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77872557_bf06_47e5_b7cb_0101ddd79f56.slice/crio-f24ddb1cb4a8b4e4e0130bc333cb4644650f0eaca7629b312c319acf251c062a WatchSource:0}: Error finding container f24ddb1cb4a8b4e4e0130bc333cb4644650f0eaca7629b312c319acf251c062a: Status 404 returned error can't find the container with id f24ddb1cb4a8b4e4e0130bc333cb4644650f0eaca7629b312c319acf251c062a Oct 13 21:23:23 crc kubenswrapper[4689]: I1013 21:23:23.643491 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-z2p9k" event={"ID":"77872557-bf06-47e5-b7cb-0101ddd79f56","Type":"ContainerStarted","Data":"ec09370ad41cda3dcd714e14bba6caf85efe27d414ac26069fe3755ecc596aa0"} Oct 13 21:23:23 crc kubenswrapper[4689]: I1013 21:23:23.643846 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-z2p9k" event={"ID":"77872557-bf06-47e5-b7cb-0101ddd79f56","Type":"ContainerStarted","Data":"044a3dd32056e618eff6f1b3a5ab9d55301a215bbe6f6a9ba4d4946014762bd3"} Oct 13 21:23:23 crc kubenswrapper[4689]: I1013 21:23:23.643858 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-z2p9k" event={"ID":"77872557-bf06-47e5-b7cb-0101ddd79f56","Type":"ContainerStarted","Data":"f24ddb1cb4a8b4e4e0130bc333cb4644650f0eaca7629b312c319acf251c062a"} Oct 13 21:23:23 crc kubenswrapper[4689]: I1013 21:23:23.644071 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-z2p9k" Oct 13 21:23:23 crc kubenswrapper[4689]: I1013 21:23:23.646937 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-fzzmz" event={"ID":"692201d0-1473-499e-b9e6-2d35e6c72032","Type":"ContainerStarted","Data":"08a7c408b46717dce414af7a5057f0b3b9e13bea78ad972c984a2ab380ac4eb6"} Oct 13 21:23:23 crc kubenswrapper[4689]: I1013 21:23:23.674917 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-z2p9k" podStartSLOduration=2.674897403 podStartE2EDuration="2.674897403s" podCreationTimestamp="2025-10-13 21:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:23:23.670962271 +0000 UTC m=+720.589207356" watchObservedRunningTime="2025-10-13 21:23:23.674897403 +0000 UTC m=+720.593142488" Oct 13 21:23:29 crc kubenswrapper[4689]: I1013 21:23:29.696765 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" event={"ID":"13456adb-0ae6-4db3-a924-dabf915a24aa","Type":"ContainerStarted","Data":"8cb41aa97fcac7b1cfdf504356edc44c2f5b4c754e3d2340cfe460ad3bc1aec0"} Oct 13 21:23:29 crc kubenswrapper[4689]: I1013 21:23:29.697382 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:29 crc kubenswrapper[4689]: I1013 21:23:29.700000 4689 generic.go:334] "Generic (PLEG): container finished" podID="bb5b52f5-2cd3-4945-8242-96deb1549036" containerID="139f9471a323d859fa9af4c69f34e0e9e39e4171042c65f0e62459810c7d6ad2" exitCode=0 Oct 13 21:23:29 crc kubenswrapper[4689]: I1013 21:23:29.700044 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerDied","Data":"139f9471a323d859fa9af4c69f34e0e9e39e4171042c65f0e62459810c7d6ad2"} Oct 13 21:23:29 crc kubenswrapper[4689]: I1013 21:23:29.711519 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" podStartSLOduration=1.246858061 podStartE2EDuration="8.71150433s" podCreationTimestamp="2025-10-13 21:23:21 +0000 UTC" firstStartedPulling="2025-10-13 21:23:21.628401365 +0000 UTC m=+718.546646450" lastFinishedPulling="2025-10-13 21:23:29.093047634 +0000 UTC m=+726.011292719" observedRunningTime="2025-10-13 21:23:29.710689811 +0000 UTC m=+726.628934896" watchObservedRunningTime="2025-10-13 21:23:29.71150433 +0000 UTC m=+726.629749405" Oct 13 21:23:30 crc kubenswrapper[4689]: I1013 21:23:30.707156 4689 generic.go:334] "Generic (PLEG): container finished" podID="bb5b52f5-2cd3-4945-8242-96deb1549036" containerID="b5bd6dc9ff3dd1f3eb699e855b2f665446f3bc5921bec91f41b3634f1500b42e" exitCode=0 Oct 13 21:23:30 crc kubenswrapper[4689]: I1013 21:23:30.707213 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerDied","Data":"b5bd6dc9ff3dd1f3eb699e855b2f665446f3bc5921bec91f41b3634f1500b42e"} Oct 13 21:23:31 crc kubenswrapper[4689]: I1013 21:23:31.714679 4689 generic.go:334] "Generic (PLEG): container finished" podID="bb5b52f5-2cd3-4945-8242-96deb1549036" containerID="920fb6b9cf6fde3fbca166a55a892637c97aec9828039e1f3495abe1d094a549" exitCode=0 Oct 13 21:23:31 crc kubenswrapper[4689]: I1013 21:23:31.714736 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerDied","Data":"920fb6b9cf6fde3fbca166a55a892637c97aec9828039e1f3495abe1d094a549"} Oct 13 21:23:32 crc kubenswrapper[4689]: I1013 21:23:32.133851 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-fzzmz" Oct 13 21:23:32 crc kubenswrapper[4689]: I1013 21:23:32.726839 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerStarted","Data":"bf7856bddf17f1371222a645d5c7e5be0696d39d9255c7b6808a3ab5da672c91"} Oct 13 21:23:32 crc kubenswrapper[4689]: I1013 21:23:32.727224 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerStarted","Data":"c4d8268b6c8b81bd2ad1031899a3f4175afbfce567fe1f4f44edbc3548e8dc63"} Oct 13 21:23:33 crc kubenswrapper[4689]: I1013 21:23:33.011798 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-z2p9k" Oct 13 21:23:33 crc kubenswrapper[4689]: I1013 21:23:33.739090 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerStarted","Data":"b6ee1cf17fc0fa2c585a500bd07d7143693aac5ee80a1cb94ec2b3456220acfe"} Oct 13 21:23:33 crc kubenswrapper[4689]: I1013 21:23:33.739151 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerStarted","Data":"c246688ae4adb0ee9a488bb17feb9599d0dc5a8461e0e0d46c05c8f69c57fd51"} Oct 13 21:23:33 crc kubenswrapper[4689]: 
I1013 21:23:33.739166 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerStarted","Data":"a0643f1310aaf6c9956782e54ffa08cd292737a67bc7f1d62feb00ba13086200"} Oct 13 21:23:33 crc kubenswrapper[4689]: I1013 21:23:33.739178 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7d7t6" event={"ID":"bb5b52f5-2cd3-4945-8242-96deb1549036","Type":"ContainerStarted","Data":"c9e993c9be04914a26fa07dc868f481506b4747206d6f31138808380c0cc703d"} Oct 13 21:23:33 crc kubenswrapper[4689]: I1013 21:23:33.739296 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:33 crc kubenswrapper[4689]: I1013 21:23:33.762270 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-7d7t6" podStartSLOduration=5.895226371 podStartE2EDuration="12.762247073s" podCreationTimestamp="2025-10-13 21:23:21 +0000 UTC" firstStartedPulling="2025-10-13 21:23:22.200752439 +0000 UTC m=+719.118997524" lastFinishedPulling="2025-10-13 21:23:29.067773141 +0000 UTC m=+725.986018226" observedRunningTime="2025-10-13 21:23:33.758917555 +0000 UTC m=+730.677162640" watchObservedRunningTime="2025-10-13 21:23:33.762247073 +0000 UTC m=+730.680492158" Oct 13 21:23:37 crc kubenswrapper[4689]: I1013 21:23:37.045652 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:37 crc kubenswrapper[4689]: I1013 21:23:37.080775 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.498570 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-wv7t5"] Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.500575 4689 util.go:30] "No sandbox for pod can be found. 
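[Note] The "Observed pod startup duration" entries above can be checked by hand: podStartE2EDuration is the observed running time minus the pod creation timestamp, and podStartSLOduration additionally excludes the image-pull window (lastFinishedPulling minus firstStartedPulling). A small Go sketch recomputing the frr-k8s-7d7t6 numbers from the timestamps printed in that entry (an illustration of the arithmetic, not kubelet's actual code):

package main

import (
	"fmt"
	"time"
)

// Recomputes the frr-k8s-7d7t6 figures from the timestamps in the
// pod_startup_latency_tracker.go entry above.
func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-13 21:23:21 +0000 UTC")
	firstPull := parse("2025-10-13 21:23:22.200752439 +0000 UTC")
	lastPull := parse("2025-10-13 21:23:29.067773141 +0000 UTC")
	running := parse("2025-10-13 21:23:33.762247073 +0000 UTC")

	e2e := running.Sub(created)          // 12.762247073s = podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 5.895226371s = podStartSLOduration
	fmt.Println(e2e, slo)
}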
Need to start a new one" pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.502473 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.502974 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-hc5j7" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.503316 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.511753 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wv7t5"] Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.526919 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xncz2\" (UniqueName: \"kubernetes.io/projected/c2f0840b-8103-4f7a-8698-3fd60e779a59-kube-api-access-xncz2\") pod \"openstack-operator-index-wv7t5\" (UID: \"c2f0840b-8103-4f7a-8698-3fd60e779a59\") " pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.628846 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xncz2\" (UniqueName: \"kubernetes.io/projected/c2f0840b-8103-4f7a-8698-3fd60e779a59-kube-api-access-xncz2\") pod \"openstack-operator-index-wv7t5\" (UID: \"c2f0840b-8103-4f7a-8698-3fd60e779a59\") " pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.651387 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xncz2\" (UniqueName: \"kubernetes.io/projected/c2f0840b-8103-4f7a-8698-3fd60e779a59-kube-api-access-xncz2\") pod \"openstack-operator-index-wv7t5\" (UID: \"c2f0840b-8103-4f7a-8698-3fd60e779a59\") " pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:39 crc kubenswrapper[4689]: I1013 21:23:39.867329 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:40 crc kubenswrapper[4689]: I1013 21:23:40.250789 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wv7t5"] Oct 13 21:23:40 crc kubenswrapper[4689]: I1013 21:23:40.784786 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wv7t5" event={"ID":"c2f0840b-8103-4f7a-8698-3fd60e779a59","Type":"ContainerStarted","Data":"e2f8f791a285ddcb8a753b84dc78ee9928bc5c70b1e28a1b290315841cb38cc5"} Oct 13 21:23:41 crc kubenswrapper[4689]: I1013 21:23:41.445787 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-bqf92" Oct 13 21:23:42 crc kubenswrapper[4689]: I1013 21:23:42.059907 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-7d7t6" Oct 13 21:23:43 crc kubenswrapper[4689]: I1013 21:23:43.804784 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wv7t5" event={"ID":"c2f0840b-8103-4f7a-8698-3fd60e779a59","Type":"ContainerStarted","Data":"06e27249bd30d8bad48362a34f4e243a5d55476488a5b4e1e75474f83536a56d"} Oct 13 21:23:43 crc kubenswrapper[4689]: I1013 21:23:43.827551 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-wv7t5" podStartSLOduration=2.087315113 podStartE2EDuration="4.827529265s" podCreationTimestamp="2025-10-13 21:23:39 +0000 UTC" firstStartedPulling="2025-10-13 21:23:40.303917374 +0000 UTC m=+737.222162459" lastFinishedPulling="2025-10-13 21:23:43.044131526 +0000 UTC m=+739.962376611" observedRunningTime="2025-10-13 21:23:43.823815198 +0000 UTC m=+740.742060303" watchObservedRunningTime="2025-10-13 21:23:43.827529265 +0000 UTC m=+740.745774340" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.000698 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p7j66"] Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.001234 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" podUID="bd0e15ea-205d-4e73-822b-b16cc8f33ab2" containerName="controller-manager" containerID="cri-o://3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717" gracePeriod=30 Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.110500 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d"] Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.110758 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" podUID="cfbc99b4-60d2-42c6-8c34-13c7f60fd122" containerName="route-controller-manager" containerID="cri-o://f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134" gracePeriod=30 Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.556141 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.561991 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.659773 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-config\") pod \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.659839 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8rkp\" (UniqueName: \"kubernetes.io/projected/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-kube-api-access-c8rkp\") pod \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.659869 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-serving-cert\") pod \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.659907 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-client-ca\") pod \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.659932 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-serving-cert\") pod \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.659973 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn6ht\" (UniqueName: \"kubernetes.io/projected/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-kube-api-access-nn6ht\") pod \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.660063 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-client-ca\") pod \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.660898 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-proxy-ca-bundles\") pod \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\" (UID: \"bd0e15ea-205d-4e73-822b-b16cc8f33ab2\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.660955 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-config\") pod \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\" (UID: \"cfbc99b4-60d2-42c6-8c34-13c7f60fd122\") " Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.660640 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-client-ca" (OuterVolumeSpecName: "client-ca") pod "bd0e15ea-205d-4e73-822b-b16cc8f33ab2" 
(UID: "bd0e15ea-205d-4e73-822b-b16cc8f33ab2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.660705 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-config" (OuterVolumeSpecName: "config") pod "bd0e15ea-205d-4e73-822b-b16cc8f33ab2" (UID: "bd0e15ea-205d-4e73-822b-b16cc8f33ab2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.660922 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-client-ca" (OuterVolumeSpecName: "client-ca") pod "cfbc99b4-60d2-42c6-8c34-13c7f60fd122" (UID: "cfbc99b4-60d2-42c6-8c34-13c7f60fd122"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.661251 4689 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.661270 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.661281 4689 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-client-ca\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.661515 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "bd0e15ea-205d-4e73-822b-b16cc8f33ab2" (UID: "bd0e15ea-205d-4e73-822b-b16cc8f33ab2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.661777 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-config" (OuterVolumeSpecName: "config") pod "cfbc99b4-60d2-42c6-8c34-13c7f60fd122" (UID: "cfbc99b4-60d2-42c6-8c34-13c7f60fd122"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.665187 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bd0e15ea-205d-4e73-822b-b16cc8f33ab2" (UID: "bd0e15ea-205d-4e73-822b-b16cc8f33ab2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.665419 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-kube-api-access-c8rkp" (OuterVolumeSpecName: "kube-api-access-c8rkp") pod "bd0e15ea-205d-4e73-822b-b16cc8f33ab2" (UID: "bd0e15ea-205d-4e73-822b-b16cc8f33ab2"). InnerVolumeSpecName "kube-api-access-c8rkp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.665545 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-kube-api-access-nn6ht" (OuterVolumeSpecName: "kube-api-access-nn6ht") pod "cfbc99b4-60d2-42c6-8c34-13c7f60fd122" (UID: "cfbc99b4-60d2-42c6-8c34-13c7f60fd122"). InnerVolumeSpecName "kube-api-access-nn6ht". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.665708 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cfbc99b4-60d2-42c6-8c34-13c7f60fd122" (UID: "cfbc99b4-60d2-42c6-8c34-13c7f60fd122"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.762734 4689 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.762768 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.762779 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8rkp\" (UniqueName: \"kubernetes.io/projected/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-kube-api-access-c8rkp\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.762791 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.762800 4689 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd0e15ea-205d-4e73-822b-b16cc8f33ab2-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.762809 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn6ht\" (UniqueName: \"kubernetes.io/projected/cfbc99b4-60d2-42c6-8c34-13c7f60fd122-kube-api-access-nn6ht\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.837119 4689 generic.go:334] "Generic (PLEG): container finished" podID="cfbc99b4-60d2-42c6-8c34-13c7f60fd122" containerID="f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134" exitCode=0 Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.837184 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.837192 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" event={"ID":"cfbc99b4-60d2-42c6-8c34-13c7f60fd122","Type":"ContainerDied","Data":"f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134"} Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.837337 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d" event={"ID":"cfbc99b4-60d2-42c6-8c34-13c7f60fd122","Type":"ContainerDied","Data":"d5343c19dacfa88620b6eaac0ed6d5094f39583558aefacc6d0731c329d4467d"} Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.837367 4689 scope.go:117] "RemoveContainer" containerID="f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.839100 4689 generic.go:334] "Generic (PLEG): container finished" podID="bd0e15ea-205d-4e73-822b-b16cc8f33ab2" containerID="3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717" exitCode=0 Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.839140 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" event={"ID":"bd0e15ea-205d-4e73-822b-b16cc8f33ab2","Type":"ContainerDied","Data":"3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717"} Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.839179 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" event={"ID":"bd0e15ea-205d-4e73-822b-b16cc8f33ab2","Type":"ContainerDied","Data":"d394fd0ad18b78c2152aac5b99666d0e5480cb27edde98c1d8c9f6c829e35c64"} Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.839238 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-p7j66" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.860272 4689 scope.go:117] "RemoveContainer" containerID="f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134" Oct 13 21:23:49 crc kubenswrapper[4689]: E1013 21:23:49.860831 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134\": container with ID starting with f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134 not found: ID does not exist" containerID="f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.860894 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134"} err="failed to get container status \"f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134\": rpc error: code = NotFound desc = could not find container \"f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134\": container with ID starting with f985627d0b851a1ca3700e284ef70fdfb7267da182009ed28d25bbb6e2802134 not found: ID does not exist" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.860928 4689 scope.go:117] "RemoveContainer" containerID="3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.868731 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.880142 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d"] Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.880198 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-82j7d"] Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.880218 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.880357 4689 scope.go:117] "RemoveContainer" containerID="3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717" Oct 13 21:23:49 crc kubenswrapper[4689]: E1013 21:23:49.880803 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717\": container with ID starting with 3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717 not found: ID does not exist" containerID="3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717" Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.881624 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717"} err="failed to get container status \"3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717\": rpc error: code = NotFound desc = could not find container \"3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717\": container with ID starting with 3db4760c48ac99b346ab3ff856a46fe0141eb1807663884942c9e4dac062b717 not found: ID does not exist" Oct 13 21:23:49 
Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.889465 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p7j66"]
Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.892829 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-p7j66"]
Oct 13 21:23:49 crc kubenswrapper[4689]: I1013 21:23:49.903302 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-wv7t5"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.399309 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"]
Oct 13 21:23:50 crc kubenswrapper[4689]: E1013 21:23:50.399936 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfbc99b4-60d2-42c6-8c34-13c7f60fd122" containerName="route-controller-manager"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.399950 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfbc99b4-60d2-42c6-8c34-13c7f60fd122" containerName="route-controller-manager"
Oct 13 21:23:50 crc kubenswrapper[4689]: E1013 21:23:50.399959 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd0e15ea-205d-4e73-822b-b16cc8f33ab2" containerName="controller-manager"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.399981 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd0e15ea-205d-4e73-822b-b16cc8f33ab2" containerName="controller-manager"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.400294 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd0e15ea-205d-4e73-822b-b16cc8f33ab2" containerName="controller-manager"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.400305 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfbc99b4-60d2-42c6-8c34-13c7f60fd122" containerName="route-controller-manager"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.400734 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.402509 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.403501 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.404269 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.404437 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.405251 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.406843 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-55456dcf98-lk2cn"]
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.407365 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.407978 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.412576 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.412832 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.413193 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.413283 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.413499 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.414153 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.423898 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"]
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.426570 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.439596 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-55456dcf98-lk2cn"]
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471499 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89c06d8c-c8ed-485c-a827-d94f35c238e0-config\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471609 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk65d\" (UniqueName: \"kubernetes.io/projected/bfa78066-df3f-44f4-a11c-306184429037-kube-api-access-zk65d\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471684 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89c06d8c-c8ed-485c-a827-d94f35c238e0-serving-cert\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471721 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-proxy-ca-bundles\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471747 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6j72\" (UniqueName: \"kubernetes.io/projected/89c06d8c-c8ed-485c-a827-d94f35c238e0-kube-api-access-x6j72\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471774 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-config\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471796 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-client-ca\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471820 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/89c06d8c-c8ed-485c-a827-d94f35c238e0-client-ca\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.471853 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfa78066-df3f-44f4-a11c-306184429037-serving-cert\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572535 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89c06d8c-c8ed-485c-a827-d94f35c238e0-serving-cert\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572619 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-proxy-ca-bundles\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572649 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6j72\" (UniqueName: \"kubernetes.io/projected/89c06d8c-c8ed-485c-a827-d94f35c238e0-kube-api-access-x6j72\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572680 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-config\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572701 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-client-ca\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572720 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/89c06d8c-c8ed-485c-a827-d94f35c238e0-client-ca\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572749 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfa78066-df3f-44f4-a11c-306184429037-serving-cert\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn"
Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572782 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89c06d8c-c8ed-485c-a827-d94f35c238e0-config\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID:
\"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.572826 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk65d\" (UniqueName: \"kubernetes.io/projected/bfa78066-df3f-44f4-a11c-306184429037-kube-api-access-zk65d\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.574214 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/89c06d8c-c8ed-485c-a827-d94f35c238e0-client-ca\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.574253 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89c06d8c-c8ed-485c-a827-d94f35c238e0-config\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.574324 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-client-ca\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.574342 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-config\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.574939 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bfa78066-df3f-44f4-a11c-306184429037-proxy-ca-bundles\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.583663 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89c06d8c-c8ed-485c-a827-d94f35c238e0-serving-cert\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.587855 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfa78066-df3f-44f4-a11c-306184429037-serving-cert\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.589412 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x6j72\" (UniqueName: \"kubernetes.io/projected/89c06d8c-c8ed-485c-a827-d94f35c238e0-kube-api-access-x6j72\") pod \"route-controller-manager-84fb4f9fb7-m2gcf\" (UID: \"89c06d8c-c8ed-485c-a827-d94f35c238e0\") " pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.593644 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk65d\" (UniqueName: \"kubernetes.io/projected/bfa78066-df3f-44f4-a11c-306184429037-kube-api-access-zk65d\") pod \"controller-manager-55456dcf98-lk2cn\" (UID: \"bfa78066-df3f-44f4-a11c-306184429037\") " pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.733621 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.743688 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:50 crc kubenswrapper[4689]: I1013 21:23:50.899222 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-wv7t5" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.031766 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-55456dcf98-lk2cn"] Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.067572 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf"] Oct 13 21:23:51 crc kubenswrapper[4689]: W1013 21:23:51.078227 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89c06d8c_c8ed_485c_a827_d94f35c238e0.slice/crio-80606bfaa526669fd8ea2af3fb1ca845ccb9c24c2406120df81b8df7a578bbbe WatchSource:0}: Error finding container 80606bfaa526669fd8ea2af3fb1ca845ccb9c24c2406120df81b8df7a578bbbe: Status 404 returned error can't find the container with id 80606bfaa526669fd8ea2af3fb1ca845ccb9c24c2406120df81b8df7a578bbbe Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.889387 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd0e15ea-205d-4e73-822b-b16cc8f33ab2" path="/var/lib/kubelet/pods/bd0e15ea-205d-4e73-822b-b16cc8f33ab2/volumes" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.890744 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfbc99b4-60d2-42c6-8c34-13c7f60fd122" path="/var/lib/kubelet/pods/cfbc99b4-60d2-42c6-8c34-13c7f60fd122/volumes" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.891171 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" event={"ID":"bfa78066-df3f-44f4-a11c-306184429037","Type":"ContainerStarted","Data":"104a8fc3ca2119edaff0f0279806a0f347eb3de183cbfadcb6903b3bc68df868"} Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.891193 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" event={"ID":"bfa78066-df3f-44f4-a11c-306184429037","Type":"ContainerStarted","Data":"6f030afdbdc5a60ecbc19128f092852af9d54b7a8e6e2cf6df6968467cec5276"} Oct 13 21:23:51 crc 
kubenswrapper[4689]: I1013 21:23:51.891215 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.891226 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" event={"ID":"89c06d8c-c8ed-485c-a827-d94f35c238e0","Type":"ContainerStarted","Data":"b5b9a218492e93470aaff067ddc6b1a597a4fd558a0e4d711b80d534e2e58096"} Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.891238 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.891247 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" event={"ID":"89c06d8c-c8ed-485c-a827-d94f35c238e0","Type":"ContainerStarted","Data":"80606bfaa526669fd8ea2af3fb1ca845ccb9c24c2406120df81b8df7a578bbbe"} Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.891281 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.894390 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.921897 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-84fb4f9fb7-m2gcf" podStartSLOduration=2.921858984 podStartE2EDuration="2.921858984s" podCreationTimestamp="2025-10-13 21:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:23:51.918833093 +0000 UTC m=+748.837078188" watchObservedRunningTime="2025-10-13 21:23:51.921858984 +0000 UTC m=+748.840104069" Oct 13 21:23:51 crc kubenswrapper[4689]: I1013 21:23:51.922177 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-55456dcf98-lk2cn" podStartSLOduration=2.922172581 podStartE2EDuration="2.922172581s" podCreationTimestamp="2025-10-13 21:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:23:51.898935776 +0000 UTC m=+748.817180861" watchObservedRunningTime="2025-10-13 21:23:51.922172581 +0000 UTC m=+748.840417666" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.341227 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd"] Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.343009 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.345296 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-tljcc" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.349666 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd"] Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.394206 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-util\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.394261 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-bundle\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.394374 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nzn6\" (UniqueName: \"kubernetes.io/projected/d1a40226-990d-4a34-b499-91ee14c3da86-kube-api-access-4nzn6\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.495309 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nzn6\" (UniqueName: \"kubernetes.io/projected/d1a40226-990d-4a34-b499-91ee14c3da86-kube-api-access-4nzn6\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.495383 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-util\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.495412 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-bundle\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.495971 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-bundle\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.496042 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-util\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.519059 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nzn6\" (UniqueName: \"kubernetes.io/projected/d1a40226-990d-4a34-b499-91ee14c3da86-kube-api-access-4nzn6\") pod \"669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.670523 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:52 crc kubenswrapper[4689]: I1013 21:23:52.899968 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd"] Oct 13 21:23:52 crc kubenswrapper[4689]: W1013 21:23:52.914932 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1a40226_990d_4a34_b499_91ee14c3da86.slice/crio-a34546bcabc70adb371732062bee07a5eee05c0fe8c2f614c90390d6b8de7a79 WatchSource:0}: Error finding container a34546bcabc70adb371732062bee07a5eee05c0fe8c2f614c90390d6b8de7a79: Status 404 returned error can't find the container with id a34546bcabc70adb371732062bee07a5eee05c0fe8c2f614c90390d6b8de7a79 Oct 13 21:23:53 crc kubenswrapper[4689]: I1013 21:23:53.904102 4689 generic.go:334] "Generic (PLEG): container finished" podID="d1a40226-990d-4a34-b499-91ee14c3da86" containerID="a226ef5e97c0bd04d0234782b21e4147898a2c8fce835db8ff1af99207b184cc" exitCode=0 Oct 13 21:23:53 crc kubenswrapper[4689]: I1013 21:23:53.904715 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" event={"ID":"d1a40226-990d-4a34-b499-91ee14c3da86","Type":"ContainerDied","Data":"a226ef5e97c0bd04d0234782b21e4147898a2c8fce835db8ff1af99207b184cc"} Oct 13 21:23:53 crc kubenswrapper[4689]: I1013 21:23:53.904758 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" event={"ID":"d1a40226-990d-4a34-b499-91ee14c3da86","Type":"ContainerStarted","Data":"a34546bcabc70adb371732062bee07a5eee05c0fe8c2f614c90390d6b8de7a79"} Oct 13 21:23:54 crc kubenswrapper[4689]: I1013 21:23:54.913253 4689 generic.go:334] "Generic (PLEG): container finished" podID="d1a40226-990d-4a34-b499-91ee14c3da86" containerID="6ce53d2a16d11b02ae119e49eab58b940996684181385101671e7e91a35bffcd" exitCode=0 Oct 13 21:23:54 crc kubenswrapper[4689]: I1013 21:23:54.913326 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" event={"ID":"d1a40226-990d-4a34-b499-91ee14c3da86","Type":"ContainerDied","Data":"6ce53d2a16d11b02ae119e49eab58b940996684181385101671e7e91a35bffcd"} Oct 13 21:23:55 crc kubenswrapper[4689]: I1013 21:23:55.923539 4689 generic.go:334] "Generic (PLEG): container finished" podID="d1a40226-990d-4a34-b499-91ee14c3da86" containerID="246c0548e0fc3738b6950c6f964f26614bcf33349c3f3dae36ed92ef2df4796e" exitCode=0 Oct 13 21:23:55 crc kubenswrapper[4689]: I1013 21:23:55.923702 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" event={"ID":"d1a40226-990d-4a34-b499-91ee14c3da86","Type":"ContainerDied","Data":"246c0548e0fc3738b6950c6f964f26614bcf33349c3f3dae36ed92ef2df4796e"} Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.299457 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.369997 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nzn6\" (UniqueName: \"kubernetes.io/projected/d1a40226-990d-4a34-b499-91ee14c3da86-kube-api-access-4nzn6\") pod \"d1a40226-990d-4a34-b499-91ee14c3da86\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.370064 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-bundle\") pod \"d1a40226-990d-4a34-b499-91ee14c3da86\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.370145 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-util\") pod \"d1a40226-990d-4a34-b499-91ee14c3da86\" (UID: \"d1a40226-990d-4a34-b499-91ee14c3da86\") " Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.372094 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-bundle" (OuterVolumeSpecName: "bundle") pod "d1a40226-990d-4a34-b499-91ee14c3da86" (UID: "d1a40226-990d-4a34-b499-91ee14c3da86"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.376443 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1a40226-990d-4a34-b499-91ee14c3da86-kube-api-access-4nzn6" (OuterVolumeSpecName: "kube-api-access-4nzn6") pod "d1a40226-990d-4a34-b499-91ee14c3da86" (UID: "d1a40226-990d-4a34-b499-91ee14c3da86"). InnerVolumeSpecName "kube-api-access-4nzn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.383506 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-util" (OuterVolumeSpecName: "util") pod "d1a40226-990d-4a34-b499-91ee14c3da86" (UID: "d1a40226-990d-4a34-b499-91ee14c3da86"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.471871 4689 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-util\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.471909 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nzn6\" (UniqueName: \"kubernetes.io/projected/d1a40226-990d-4a34-b499-91ee14c3da86-kube-api-access-4nzn6\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.471922 4689 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1a40226-990d-4a34-b499-91ee14c3da86-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.938516 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" event={"ID":"d1a40226-990d-4a34-b499-91ee14c3da86","Type":"ContainerDied","Data":"a34546bcabc70adb371732062bee07a5eee05c0fe8c2f614c90390d6b8de7a79"} Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.938567 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a34546bcabc70adb371732062bee07a5eee05c0fe8c2f614c90390d6b8de7a79" Oct 13 21:23:57 crc kubenswrapper[4689]: I1013 21:23:57.938566 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd" Oct 13 21:23:58 crc kubenswrapper[4689]: I1013 21:23:58.577612 4689 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.190452 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl"] Oct 13 21:24:01 crc kubenswrapper[4689]: E1013 21:24:01.191211 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1a40226-990d-4a34-b499-91ee14c3da86" containerName="pull" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.191224 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1a40226-990d-4a34-b499-91ee14c3da86" containerName="pull" Oct 13 21:24:01 crc kubenswrapper[4689]: E1013 21:24:01.191246 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1a40226-990d-4a34-b499-91ee14c3da86" containerName="util" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.191252 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1a40226-990d-4a34-b499-91ee14c3da86" containerName="util" Oct 13 21:24:01 crc kubenswrapper[4689]: E1013 21:24:01.191267 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1a40226-990d-4a34-b499-91ee14c3da86" containerName="extract" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.191274 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1a40226-990d-4a34-b499-91ee14c3da86" containerName="extract" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.191383 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1a40226-990d-4a34-b499-91ee14c3da86" containerName="extract" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.192090 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.194299 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-pv25p" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.219755 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chgwc\" (UniqueName: \"kubernetes.io/projected/6a94802f-3575-410f-8d65-f1d11165a10e-kube-api-access-chgwc\") pod \"openstack-operator-controller-operator-5555666847-lgpdl\" (UID: \"6a94802f-3575-410f-8d65-f1d11165a10e\") " pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.235426 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl"] Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.321302 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chgwc\" (UniqueName: \"kubernetes.io/projected/6a94802f-3575-410f-8d65-f1d11165a10e-kube-api-access-chgwc\") pod \"openstack-operator-controller-operator-5555666847-lgpdl\" (UID: \"6a94802f-3575-410f-8d65-f1d11165a10e\") " pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.357579 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chgwc\" (UniqueName: \"kubernetes.io/projected/6a94802f-3575-410f-8d65-f1d11165a10e-kube-api-access-chgwc\") pod \"openstack-operator-controller-operator-5555666847-lgpdl\" (UID: \"6a94802f-3575-410f-8d65-f1d11165a10e\") " pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" Oct 13 21:24:01 crc kubenswrapper[4689]: I1013 21:24:01.508206 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" Oct 13 21:24:02 crc kubenswrapper[4689]: I1013 21:24:02.008810 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl"] Oct 13 21:24:02 crc kubenswrapper[4689]: W1013 21:24:02.010199 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a94802f_3575_410f_8d65_f1d11165a10e.slice/crio-566c495aee8c357f518c6e50ef43bc433188261f7acd4ee3d4ed72ec7d7c2b9e WatchSource:0}: Error finding container 566c495aee8c357f518c6e50ef43bc433188261f7acd4ee3d4ed72ec7d7c2b9e: Status 404 returned error can't find the container with id 566c495aee8c357f518c6e50ef43bc433188261f7acd4ee3d4ed72ec7d7c2b9e Oct 13 21:24:02 crc kubenswrapper[4689]: I1013 21:24:02.968309 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" event={"ID":"6a94802f-3575-410f-8d65-f1d11165a10e","Type":"ContainerStarted","Data":"566c495aee8c357f518c6e50ef43bc433188261f7acd4ee3d4ed72ec7d7c2b9e"} Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.504292 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gm5b5"] Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.506128 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.514234 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gm5b5"] Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.684762 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxdq2\" (UniqueName: \"kubernetes.io/projected/bf0077d3-332f-4a7b-8238-fe2aa4effe13-kube-api-access-pxdq2\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.684829 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-utilities\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.684882 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-catalog-content\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.786476 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxdq2\" (UniqueName: \"kubernetes.io/projected/bf0077d3-332f-4a7b-8238-fe2aa4effe13-kube-api-access-pxdq2\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.786523 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-utilities\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.786578 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-catalog-content\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.787467 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-catalog-content\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.788066 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-utilities\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.821942 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pxdq2\" (UniqueName: \"kubernetes.io/projected/bf0077d3-332f-4a7b-8238-fe2aa4effe13-kube-api-access-pxdq2\") pod \"redhat-operators-gm5b5\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:04 crc kubenswrapper[4689]: I1013 21:24:04.838975 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:06 crc kubenswrapper[4689]: I1013 21:24:06.690640 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gm5b5"] Oct 13 21:24:06 crc kubenswrapper[4689]: I1013 21:24:06.997786 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" event={"ID":"6a94802f-3575-410f-8d65-f1d11165a10e","Type":"ContainerStarted","Data":"3839ae742132e9267eda6835dbc5becee5a2ed4b2ff01e6543798b3fb7410c06"} Oct 13 21:24:06 crc kubenswrapper[4689]: I1013 21:24:06.999525 4689 generic.go:334] "Generic (PLEG): container finished" podID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerID="f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c" exitCode=0 Oct 13 21:24:06 crc kubenswrapper[4689]: I1013 21:24:06.999561 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gm5b5" event={"ID":"bf0077d3-332f-4a7b-8238-fe2aa4effe13","Type":"ContainerDied","Data":"f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c"} Oct 13 21:24:06 crc kubenswrapper[4689]: I1013 21:24:06.999578 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gm5b5" event={"ID":"bf0077d3-332f-4a7b-8238-fe2aa4effe13","Type":"ContainerStarted","Data":"23cb95cfbd023ad9fccad5dbeb7d4cca9e828b8cdc2188c56ee3f906b28d9d5d"} Oct 13 21:24:09 crc kubenswrapper[4689]: I1013 21:24:09.015814 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gm5b5" event={"ID":"bf0077d3-332f-4a7b-8238-fe2aa4effe13","Type":"ContainerStarted","Data":"8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a"} Oct 13 21:24:09 crc kubenswrapper[4689]: I1013 21:24:09.018346 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" event={"ID":"6a94802f-3575-410f-8d65-f1d11165a10e","Type":"ContainerStarted","Data":"c52e42a268736dcc96686684a431e99179cdf212b8de3e217e40530be796b608"} Oct 13 21:24:09 crc kubenswrapper[4689]: I1013 21:24:09.019178 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" Oct 13 21:24:09 crc kubenswrapper[4689]: I1013 21:24:09.074748 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" podStartSLOduration=1.5736179319999999 podStartE2EDuration="8.07472778s" podCreationTimestamp="2025-10-13 21:24:01 +0000 UTC" firstStartedPulling="2025-10-13 21:24:02.013036335 +0000 UTC m=+758.931281420" lastFinishedPulling="2025-10-13 21:24:08.514146183 +0000 UTC m=+765.432391268" observedRunningTime="2025-10-13 21:24:09.071853282 +0000 UTC m=+765.990098367" watchObservedRunningTime="2025-10-13 21:24:09.07472778 +0000 UTC m=+765.992972865" Oct 13 21:24:10 crc kubenswrapper[4689]: I1013 21:24:10.031144 4689 generic.go:334] "Generic (PLEG): container finished" 
podID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerID="8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a" exitCode=0 Oct 13 21:24:10 crc kubenswrapper[4689]: I1013 21:24:10.031237 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gm5b5" event={"ID":"bf0077d3-332f-4a7b-8238-fe2aa4effe13","Type":"ContainerDied","Data":"8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a"} Oct 13 21:24:11 crc kubenswrapper[4689]: I1013 21:24:11.040232 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gm5b5" event={"ID":"bf0077d3-332f-4a7b-8238-fe2aa4effe13","Type":"ContainerStarted","Data":"759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9"} Oct 13 21:24:11 crc kubenswrapper[4689]: I1013 21:24:11.045234 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5555666847-lgpdl" Oct 13 21:24:11 crc kubenswrapper[4689]: I1013 21:24:11.062018 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gm5b5" podStartSLOduration=3.5023648769999998 podStartE2EDuration="7.061996776s" podCreationTimestamp="2025-10-13 21:24:04 +0000 UTC" firstStartedPulling="2025-10-13 21:24:07.00134788 +0000 UTC m=+763.919592975" lastFinishedPulling="2025-10-13 21:24:10.560979769 +0000 UTC m=+767.479224874" observedRunningTime="2025-10-13 21:24:11.060781718 +0000 UTC m=+767.979026823" watchObservedRunningTime="2025-10-13 21:24:11.061996776 +0000 UTC m=+767.980241881" Oct 13 21:24:14 crc kubenswrapper[4689]: I1013 21:24:14.839495 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:14 crc kubenswrapper[4689]: I1013 21:24:14.840300 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:15 crc kubenswrapper[4689]: I1013 21:24:15.886080 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gm5b5" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="registry-server" probeResult="failure" output=< Oct 13 21:24:15 crc kubenswrapper[4689]: timeout: failed to connect service ":50051" within 1s Oct 13 21:24:15 crc kubenswrapper[4689]: > Oct 13 21:24:23 crc kubenswrapper[4689]: I1013 21:24:23.858813 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:24:23 crc kubenswrapper[4689]: I1013 21:24:23.859537 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:24:24 crc kubenswrapper[4689]: I1013 21:24:24.895548 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:24 crc kubenswrapper[4689]: I1013 21:24:24.977959 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:25 
crc kubenswrapper[4689]: I1013 21:24:25.127472 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gm5b5"] Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.125616 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gm5b5" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="registry-server" containerID="cri-o://759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9" gracePeriod=2 Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.591628 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.708784 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-utilities\") pod \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.708858 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-catalog-content\") pod \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.708928 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxdq2\" (UniqueName: \"kubernetes.io/projected/bf0077d3-332f-4a7b-8238-fe2aa4effe13-kube-api-access-pxdq2\") pod \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\" (UID: \"bf0077d3-332f-4a7b-8238-fe2aa4effe13\") " Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.709798 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-utilities" (OuterVolumeSpecName: "utilities") pod "bf0077d3-332f-4a7b-8238-fe2aa4effe13" (UID: "bf0077d3-332f-4a7b-8238-fe2aa4effe13"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.716853 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf0077d3-332f-4a7b-8238-fe2aa4effe13-kube-api-access-pxdq2" (OuterVolumeSpecName: "kube-api-access-pxdq2") pod "bf0077d3-332f-4a7b-8238-fe2aa4effe13" (UID: "bf0077d3-332f-4a7b-8238-fe2aa4effe13"). InnerVolumeSpecName "kube-api-access-pxdq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.799309 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf0077d3-332f-4a7b-8238-fe2aa4effe13" (UID: "bf0077d3-332f-4a7b-8238-fe2aa4effe13"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.810532 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.810598 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf0077d3-332f-4a7b-8238-fe2aa4effe13-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:24:26 crc kubenswrapper[4689]: I1013 21:24:26.810619 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxdq2\" (UniqueName: \"kubernetes.io/projected/bf0077d3-332f-4a7b-8238-fe2aa4effe13-kube-api-access-pxdq2\") on node \"crc\" DevicePath \"\"" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.133608 4689 generic.go:334] "Generic (PLEG): container finished" podID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerID="759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9" exitCode=0 Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.133616 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gm5b5" event={"ID":"bf0077d3-332f-4a7b-8238-fe2aa4effe13","Type":"ContainerDied","Data":"759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9"} Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.133707 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gm5b5" event={"ID":"bf0077d3-332f-4a7b-8238-fe2aa4effe13","Type":"ContainerDied","Data":"23cb95cfbd023ad9fccad5dbeb7d4cca9e828b8cdc2188c56ee3f906b28d9d5d"} Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.133724 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gm5b5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.133733 4689 scope.go:117] "RemoveContainer" containerID="759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.158749 4689 scope.go:117] "RemoveContainer" containerID="8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.170001 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gm5b5"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.174259 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gm5b5"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.188655 4689 scope.go:117] "RemoveContainer" containerID="f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.193709 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw"] Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.193975 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="registry-server" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.193992 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="registry-server" Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.194008 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="extract-content" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.194016 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="extract-content" Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.194032 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="extract-utilities" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.194038 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="extract-utilities" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.194142 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" containerName="registry-server" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.194728 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.200134 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-28trm" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.210246 4689 scope.go:117] "RemoveContainer" containerID="759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9" Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.210646 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9\": container with ID starting with 759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9 not found: ID does not exist" containerID="759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.210679 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9"} err="failed to get container status \"759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9\": rpc error: code = NotFound desc = could not find container \"759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9\": container with ID starting with 759c8e201289d334493f7658222f59e91ad35a47dc109c5bdc546ac4af291ae9 not found: ID does not exist" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.210704 4689 scope.go:117] "RemoveContainer" containerID="8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a" Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.211177 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a\": container with ID starting with 8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a not found: ID does not exist" containerID="8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.211219 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a"} err="failed to get container status \"8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a\": rpc error: code = NotFound desc = could not find container \"8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a\": container with ID starting with 8fcc86c5a4d18b097dd13b64201abefacffe693b58df3087999c3118b8a12d9a not found: ID does not exist" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.211236 4689 scope.go:117] "RemoveContainer" containerID="f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c" Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.211496 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c\": container with ID starting with f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c not found: ID does not exist" containerID="f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.211524 4689 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c"} err="failed to get container status \"f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c\": rpc error: code = NotFound desc = could not find container \"f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c\": container with ID starting with f8be237d41ce54c9cfe16bd27c3fdb90bf37e3feb92eb610ab5171196d650a0c not found: ID does not exist" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.213004 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.222027 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.223038 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.230567 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-tklhj" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.232064 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.233072 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.234985 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-rbhln" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.243930 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.245324 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.253064 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-9rzdz" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.259206 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.265682 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.271534 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-zmwvw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.281646 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.295948 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.301221 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.312963 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.317521 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb6k8\" (UniqueName: \"kubernetes.io/projected/3bca3670-4880-4598-abbd-8ed51e351c5a-kube-api-access-sb6k8\") pod \"designate-operator-controller-manager-687df44cdb-ltx5l\" (UID: \"3bca3670-4880-4598-abbd-8ed51e351c5a\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.317645 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljxbm\" (UniqueName: \"kubernetes.io/projected/574e8237-5b30-4af8-b93f-449d9ec98793-kube-api-access-ljxbm\") pod \"cinder-operator-controller-manager-59cdc64769-w28f4\" (UID: \"574e8237-5b30-4af8-b93f-449d9ec98793\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.317681 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rmgc\" (UniqueName: \"kubernetes.io/projected/5f5620d8-6856-4b27-b74a-208edc1ec0d7-kube-api-access-4rmgc\") pod \"barbican-operator-controller-manager-64f84fcdbb-pvsmw\" (UID: \"5f5620d8-6856-4b27-b74a-208edc1ec0d7\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.317723 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skq8w\" (UniqueName: \"kubernetes.io/projected/2ed371e4-bae8-4320-9b6b-e28103137aee-kube-api-access-skq8w\") pod \"glance-operator-controller-manager-7bb46cd7d-zm9b4\" (UID: \"2ed371e4-bae8-4320-9b6b-e28103137aee\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.317762 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l6vz\" (UniqueName: \"kubernetes.io/projected/980922e5-08ec-418a-b207-f463195cc6da-kube-api-access-2l6vz\") pod \"heat-operator-controller-manager-6d9967f8dd-8lb6r\" (UID: \"980922e5-08ec-418a-b207-f463195cc6da\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.328337 4689 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.329234 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.338686 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-8cxfb" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.356990 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.358676 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.363745 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-qtv7b" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.368449 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.384157 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.388384 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.405453 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.423878 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-8f5tf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424576 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljxbm\" (UniqueName: \"kubernetes.io/projected/574e8237-5b30-4af8-b93f-449d9ec98793-kube-api-access-ljxbm\") pod \"cinder-operator-controller-manager-59cdc64769-w28f4\" (UID: \"574e8237-5b30-4af8-b93f-449d9ec98793\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424638 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rmgc\" (UniqueName: \"kubernetes.io/projected/5f5620d8-6856-4b27-b74a-208edc1ec0d7-kube-api-access-4rmgc\") pod \"barbican-operator-controller-manager-64f84fcdbb-pvsmw\" (UID: \"5f5620d8-6856-4b27-b74a-208edc1ec0d7\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424669 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgn92\" (UniqueName: \"kubernetes.io/projected/ff2d1098-a378-4314-8662-1dfb98c56aae-kube-api-access-hgn92\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424690 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n979k\" (UniqueName: \"kubernetes.io/projected/97a56885-e550-415b-95be-3f61e0ac38e5-kube-api-access-n979k\") pod \"horizon-operator-controller-manager-6d74794d9b-gg4tl\" (UID: \"97a56885-e550-415b-95be-3f61e0ac38e5\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424716 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skq8w\" (UniqueName: \"kubernetes.io/projected/2ed371e4-bae8-4320-9b6b-e28103137aee-kube-api-access-skq8w\") pod \"glance-operator-controller-manager-7bb46cd7d-zm9b4\" (UID: \"2ed371e4-bae8-4320-9b6b-e28103137aee\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424745 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l6vz\" (UniqueName: \"kubernetes.io/projected/980922e5-08ec-418a-b207-f463195cc6da-kube-api-access-2l6vz\") pod \"heat-operator-controller-manager-6d9967f8dd-8lb6r\" (UID: \"980922e5-08ec-418a-b207-f463195cc6da\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424786 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb6k8\" (UniqueName: \"kubernetes.io/projected/3bca3670-4880-4598-abbd-8ed51e351c5a-kube-api-access-sb6k8\") pod \"designate-operator-controller-manager-687df44cdb-ltx5l\" (UID: \"3bca3670-4880-4598-abbd-8ed51e351c5a\") " 
pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.424819 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.425568 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.426701 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.428304 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-889ln" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.463814 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.476748 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljxbm\" (UniqueName: \"kubernetes.io/projected/574e8237-5b30-4af8-b93f-449d9ec98793-kube-api-access-ljxbm\") pod \"cinder-operator-controller-manager-59cdc64769-w28f4\" (UID: \"574e8237-5b30-4af8-b93f-449d9ec98793\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.477207 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rmgc\" (UniqueName: \"kubernetes.io/projected/5f5620d8-6856-4b27-b74a-208edc1ec0d7-kube-api-access-4rmgc\") pod \"barbican-operator-controller-manager-64f84fcdbb-pvsmw\" (UID: \"5f5620d8-6856-4b27-b74a-208edc1ec0d7\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.483514 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb6k8\" (UniqueName: \"kubernetes.io/projected/3bca3670-4880-4598-abbd-8ed51e351c5a-kube-api-access-sb6k8\") pod \"designate-operator-controller-manager-687df44cdb-ltx5l\" (UID: \"3bca3670-4880-4598-abbd-8ed51e351c5a\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.492243 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.493639 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l6vz\" (UniqueName: \"kubernetes.io/projected/980922e5-08ec-418a-b207-f463195cc6da-kube-api-access-2l6vz\") pod \"heat-operator-controller-manager-6d9967f8dd-8lb6r\" (UID: \"980922e5-08ec-418a-b207-f463195cc6da\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.499295 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-v664m"] Oct 13 21:24:27 
crc kubenswrapper[4689]: I1013 21:24:27.501986 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.505341 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skq8w\" (UniqueName: \"kubernetes.io/projected/2ed371e4-bae8-4320-9b6b-e28103137aee-kube-api-access-skq8w\") pod \"glance-operator-controller-manager-7bb46cd7d-zm9b4\" (UID: \"2ed371e4-bae8-4320-9b6b-e28103137aee\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.509858 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.511037 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.520312 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-dp9fd" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.520607 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-5nm9t" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.525671 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwz4r\" (UniqueName: \"kubernetes.io/projected/4e3b3f49-bb44-4375-9bab-527a5e0e57a5-kube-api-access-rwz4r\") pod \"ironic-operator-controller-manager-74cb5cbc49-mrv92\" (UID: \"4e3b3f49-bb44-4375-9bab-527a5e0e57a5\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.525748 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tngq\" (UniqueName: \"kubernetes.io/projected/de26ce24-8f8f-42e6-bd80-5331eb11f6b1-kube-api-access-7tngq\") pod \"keystone-operator-controller-manager-ddb98f99b-7zs79\" (UID: \"de26ce24-8f8f-42e6-bd80-5331eb11f6b1\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.525798 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.525840 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n979k\" (UniqueName: \"kubernetes.io/projected/97a56885-e550-415b-95be-3f61e0ac38e5-kube-api-access-n979k\") pod \"horizon-operator-controller-manager-6d74794d9b-gg4tl\" (UID: \"97a56885-e550-415b-95be-3f61e0ac38e5\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.525856 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgn92\" (UniqueName: 
\"kubernetes.io/projected/ff2d1098-a378-4314-8662-1dfb98c56aae-kube-api-access-hgn92\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.526226 4689 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.526289 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert podName:ff2d1098-a378-4314-8662-1dfb98c56aae nodeName:}" failed. No retries permitted until 2025-10-13 21:24:28.026261507 +0000 UTC m=+784.944506592 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert") pod "infra-operator-controller-manager-585fc5b659-v8wsd" (UID: "ff2d1098-a378-4314-8662-1dfb98c56aae") : secret "infra-operator-webhook-server-cert" not found Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.526545 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-v664m"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.537670 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.556181 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.564801 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.576431 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.578177 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.578439 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.581546 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-g2hmx" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.586115 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.587233 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.587490 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n979k\" (UniqueName: \"kubernetes.io/projected/97a56885-e550-415b-95be-3f61e0ac38e5-kube-api-access-n979k\") pod \"horizon-operator-controller-manager-6d74794d9b-gg4tl\" (UID: \"97a56885-e550-415b-95be-3f61e0ac38e5\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.594887 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ws5t4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.601496 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.605468 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.609113 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.609622 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgn92\" (UniqueName: \"kubernetes.io/projected/ff2d1098-a378-4314-8662-1dfb98c56aae-kube-api-access-hgn92\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.609634 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.611111 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.613174 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-kn6r9" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.616063 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.618039 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.622889 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.624366 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-2k588" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.627147 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tngq\" (UniqueName: \"kubernetes.io/projected/de26ce24-8f8f-42e6-bd80-5331eb11f6b1-kube-api-access-7tngq\") pod \"keystone-operator-controller-manager-ddb98f99b-7zs79\" (UID: \"de26ce24-8f8f-42e6-bd80-5331eb11f6b1\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.627255 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wz68\" (UniqueName: \"kubernetes.io/projected/87d04908-37f4-42ab-8328-893b4e255767-kube-api-access-6wz68\") pod \"mariadb-operator-controller-manager-5777b4f897-qjpss\" (UID: \"87d04908-37f4-42ab-8328-893b4e255767\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.627306 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c26x2\" (UniqueName: \"kubernetes.io/projected/f3d70a25-802f-4d17-a250-3b76584ff7dc-kube-api-access-c26x2\") pod \"manila-operator-controller-manager-59578bc799-v664m\" (UID: \"f3d70a25-802f-4d17-a250-3b76584ff7dc\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.627337 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwz4r\" (UniqueName: \"kubernetes.io/projected/4e3b3f49-bb44-4375-9bab-527a5e0e57a5-kube-api-access-rwz4r\") pod \"ironic-operator-controller-manager-74cb5cbc49-mrv92\" (UID: \"4e3b3f49-bb44-4375-9bab-527a5e0e57a5\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.639241 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.663527 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.663775 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwz4r\" (UniqueName: \"kubernetes.io/projected/4e3b3f49-bb44-4375-9bab-527a5e0e57a5-kube-api-access-rwz4r\") pod \"ironic-operator-controller-manager-74cb5cbc49-mrv92\" (UID: \"4e3b3f49-bb44-4375-9bab-527a5e0e57a5\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.664748 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.666916 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tngq\" (UniqueName: \"kubernetes.io/projected/de26ce24-8f8f-42e6-bd80-5331eb11f6b1-kube-api-access-7tngq\") pod \"keystone-operator-controller-manager-ddb98f99b-7zs79\" (UID: \"de26ce24-8f8f-42e6-bd80-5331eb11f6b1\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.689329 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.697208 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.702290 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.713135 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.716355 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.717856 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.713431 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-95lxf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.716534 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.723535 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-tdg8t" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.729334 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpmft\" (UniqueName: \"kubernetes.io/projected/86e5e806-711e-4a41-9c65-0b121d0228e6-kube-api-access-zpmft\") pod \"nova-operator-controller-manager-57bb74c7bf-xpqj5\" (UID: \"86e5e806-711e-4a41-9c65-0b121d0228e6\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.729472 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krxc2\" (UniqueName: \"kubernetes.io/projected/bc1916e6-51d4-4ca9-b8a2-8be1659426a2-kube-api-access-krxc2\") pod \"octavia-operator-controller-manager-6d7c7ddf95-kd8d5\" (UID: \"bc1916e6-51d4-4ca9-b8a2-8be1659426a2\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.729500 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wz68\" (UniqueName: \"kubernetes.io/projected/87d04908-37f4-42ab-8328-893b4e255767-kube-api-access-6wz68\") pod \"mariadb-operator-controller-manager-5777b4f897-qjpss\" (UID: \"87d04908-37f4-42ab-8328-893b4e255767\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.729527 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qk6t\" (UniqueName: \"kubernetes.io/projected/d34fac28-ebdd-4c77-ad9d-995611ee01d4-kube-api-access-6qk6t\") pod \"ovn-operator-controller-manager-869cc7797f-f2kxw\" (UID: \"d34fac28-ebdd-4c77-ad9d-995611ee01d4\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.729559 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c26x2\" (UniqueName: \"kubernetes.io/projected/f3d70a25-802f-4d17-a250-3b76584ff7dc-kube-api-access-c26x2\") pod \"manila-operator-controller-manager-59578bc799-v664m\" (UID: \"f3d70a25-802f-4d17-a250-3b76584ff7dc\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.729581 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbgg7\" (UniqueName: \"kubernetes.io/projected/5dc35208-04aa-4df8-af17-6ce8ad80199f-kube-api-access-tbgg7\") pod \"neutron-operator-controller-manager-797d478b46-jlgcf\" (UID: \"5dc35208-04aa-4df8-af17-6ce8ad80199f\") " 
pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.755722 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.758061 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.759261 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.779831 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-rf28m" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.780205 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.789092 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.793532 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wz68\" (UniqueName: \"kubernetes.io/projected/87d04908-37f4-42ab-8328-893b4e255767-kube-api-access-6wz68\") pod \"mariadb-operator-controller-manager-5777b4f897-qjpss\" (UID: \"87d04908-37f4-42ab-8328-893b4e255767\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.801487 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c26x2\" (UniqueName: \"kubernetes.io/projected/f3d70a25-802f-4d17-a250-3b76584ff7dc-kube-api-access-c26x2\") pod \"manila-operator-controller-manager-59578bc799-v664m\" (UID: \"f3d70a25-802f-4d17-a250-3b76584ff7dc\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.828257 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7"] Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833453 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krxc2\" (UniqueName: \"kubernetes.io/projected/bc1916e6-51d4-4ca9-b8a2-8be1659426a2-kube-api-access-krxc2\") pod \"octavia-operator-controller-manager-6d7c7ddf95-kd8d5\" (UID: \"bc1916e6-51d4-4ca9-b8a2-8be1659426a2\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833503 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qk6t\" (UniqueName: \"kubernetes.io/projected/d34fac28-ebdd-4c77-ad9d-995611ee01d4-kube-api-access-6qk6t\") pod \"ovn-operator-controller-manager-869cc7797f-f2kxw\" (UID: \"d34fac28-ebdd-4c77-ad9d-995611ee01d4\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833555 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833579 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbgg7\" (UniqueName: \"kubernetes.io/projected/5dc35208-04aa-4df8-af17-6ce8ad80199f-kube-api-access-tbgg7\") pod \"neutron-operator-controller-manager-797d478b46-jlgcf\" (UID: \"5dc35208-04aa-4df8-af17-6ce8ad80199f\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833633 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n667\" (UniqueName: \"kubernetes.io/projected/599721f1-ec3e-4a83-b769-db5440b2f260-kube-api-access-8n667\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833662 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n7hr\" (UniqueName: \"kubernetes.io/projected/3c9cfbce-22ae-4c0d-9b73-513bf285b4a0-kube-api-access-8n7hr\") pod \"swift-operator-controller-manager-5f4d5dfdc6-2g8g7\" (UID: \"3c9cfbce-22ae-4c0d-9b73-513bf285b4a0\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833705 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpmft\" (UniqueName: \"kubernetes.io/projected/86e5e806-711e-4a41-9c65-0b121d0228e6-kube-api-access-zpmft\") pod \"nova-operator-controller-manager-57bb74c7bf-xpqj5\" (UID: \"86e5e806-711e-4a41-9c65-0b121d0228e6\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.833736 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc68d\" (UniqueName: \"kubernetes.io/projected/261f1cfd-d8a7-4dea-baa7-3feb8f67813a-kube-api-access-hc68d\") pod \"placement-operator-controller-manager-664664cb68-j6wsf\" (UID: \"261f1cfd-d8a7-4dea-baa7-3feb8f67813a\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.882960 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krxc2\" (UniqueName: \"kubernetes.io/projected/bc1916e6-51d4-4ca9-b8a2-8be1659426a2-kube-api-access-krxc2\") pod \"octavia-operator-controller-manager-6d7c7ddf95-kd8d5\" (UID: \"bc1916e6-51d4-4ca9-b8a2-8be1659426a2\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.894997 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.895239 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbgg7\" (UniqueName: \"kubernetes.io/projected/5dc35208-04aa-4df8-af17-6ce8ad80199f-kube-api-access-tbgg7\") pod \"neutron-operator-controller-manager-797d478b46-jlgcf\" (UID: \"5dc35208-04aa-4df8-af17-6ce8ad80199f\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.897354 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpmft\" (UniqueName: \"kubernetes.io/projected/86e5e806-711e-4a41-9c65-0b121d0228e6-kube-api-access-zpmft\") pod \"nova-operator-controller-manager-57bb74c7bf-xpqj5\" (UID: \"86e5e806-711e-4a41-9c65-0b121d0228e6\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.912408 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.912996 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qk6t\" (UniqueName: \"kubernetes.io/projected/d34fac28-ebdd-4c77-ad9d-995611ee01d4-kube-api-access-6qk6t\") pod \"ovn-operator-controller-manager-869cc7797f-f2kxw\" (UID: \"d34fac28-ebdd-4c77-ad9d-995611ee01d4\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.925875 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.939942 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.940274 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n667\" (UniqueName: \"kubernetes.io/projected/599721f1-ec3e-4a83-b769-db5440b2f260-kube-api-access-8n667\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.940328 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n7hr\" (UniqueName: \"kubernetes.io/projected/3c9cfbce-22ae-4c0d-9b73-513bf285b4a0-kube-api-access-8n7hr\") pod \"swift-operator-controller-manager-5f4d5dfdc6-2g8g7\" (UID: \"3c9cfbce-22ae-4c0d-9b73-513bf285b4a0\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.940633 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc68d\" (UniqueName: \"kubernetes.io/projected/261f1cfd-d8a7-4dea-baa7-3feb8f67813a-kube-api-access-hc68d\") pod \"placement-operator-controller-manager-664664cb68-j6wsf\" (UID: \"261f1cfd-d8a7-4dea-baa7-3feb8f67813a\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.946996 4689 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 13 21:24:27 crc kubenswrapper[4689]: E1013 21:24:27.947067 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert podName:599721f1-ec3e-4a83-b769-db5440b2f260 nodeName:}" failed. No retries permitted until 2025-10-13 21:24:28.447048001 +0000 UTC m=+785.365293086 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert") pod "openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" (UID: "599721f1-ec3e-4a83-b769-db5440b2f260") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.948549 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.970497 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" Oct 13 21:24:27 crc kubenswrapper[4689]: I1013 21:24:27.992136 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.041711 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n7hr\" (UniqueName: \"kubernetes.io/projected/3c9cfbce-22ae-4c0d-9b73-513bf285b4a0-kube-api-access-8n7hr\") pod \"swift-operator-controller-manager-5f4d5dfdc6-2g8g7\" (UID: \"3c9cfbce-22ae-4c0d-9b73-513bf285b4a0\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.043381 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n667\" (UniqueName: \"kubernetes.io/projected/599721f1-ec3e-4a83-b769-db5440b2f260-kube-api-access-8n667\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.044381 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.044641 4689 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.044688 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert podName:ff2d1098-a378-4314-8662-1dfb98c56aae nodeName:}" failed. No retries permitted until 2025-10-13 21:24:29.044670924 +0000 UTC m=+785.962915999 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert") pod "infra-operator-controller-manager-585fc5b659-v8wsd" (UID: "ff2d1098-a378-4314-8662-1dfb98c56aae") : secret "infra-operator-webhook-server-cert" not found Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.045863 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf0077d3-332f-4a7b-8238-fe2aa4effe13" path="/var/lib/kubelet/pods/bf0077d3-332f-4a7b-8238-fe2aa4effe13/volumes" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.046507 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.048497 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.060447 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-vrbpx" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.060646 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc68d\" (UniqueName: \"kubernetes.io/projected/261f1cfd-d8a7-4dea-baa7-3feb8f67813a-kube-api-access-hc68d\") pod \"placement-operator-controller-manager-664664cb68-j6wsf\" (UID: \"261f1cfd-d8a7-4dea-baa7-3feb8f67813a\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.067443 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.071914 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.120068 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.123952 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.126258 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.130994 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-fm267" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.136048 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.152313 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k29l8\" (UniqueName: \"kubernetes.io/projected/6c0d5f43-6334-41be-bb4f-9d538d40004a-kube-api-access-k29l8\") pod \"telemetry-operator-controller-manager-578874c84d-n52xn\" (UID: \"6c0d5f43-6334-41be-bb4f-9d538d40004a\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.204906 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.206492 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.209177 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-9p4lx" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.249907 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.253793 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-547nc\" (UniqueName: \"kubernetes.io/projected/61a8f77a-a34b-4e04-b508-fc0fb8e7ede7-kube-api-access-547nc\") pod \"watcher-operator-controller-manager-646675d848-t5zxf\" (UID: \"61a8f77a-a34b-4e04-b508-fc0fb8e7ede7\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.254092 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtscb\" (UniqueName: \"kubernetes.io/projected/d9a167f4-4f3c-44d9-9e18-7fdf79273d12-kube-api-access-mtscb\") pod \"test-operator-controller-manager-ffcdd6c94-fcfzv\" (UID: \"d9a167f4-4f3c-44d9-9e18-7fdf79273d12\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.254201 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k29l8\" (UniqueName: \"kubernetes.io/projected/6c0d5f43-6334-41be-bb4f-9d538d40004a-kube-api-access-k29l8\") pod \"telemetry-operator-controller-manager-578874c84d-n52xn\" (UID: \"6c0d5f43-6334-41be-bb4f-9d538d40004a\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.315143 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.316419 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.316752 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k29l8\" (UniqueName: \"kubernetes.io/projected/6c0d5f43-6334-41be-bb4f-9d538d40004a-kube-api-access-k29l8\") pod \"telemetry-operator-controller-manager-578874c84d-n52xn\" (UID: \"6c0d5f43-6334-41be-bb4f-9d538d40004a\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.324786 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.329655 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-lb6m6" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.339336 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.355849 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghh46\" (UniqueName: \"kubernetes.io/projected/654be83b-acf2-4c39-b753-8f4cc7750052-kube-api-access-ghh46\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.355920 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-547nc\" (UniqueName: \"kubernetes.io/projected/61a8f77a-a34b-4e04-b508-fc0fb8e7ede7-kube-api-access-547nc\") pod \"watcher-operator-controller-manager-646675d848-t5zxf\" (UID: \"61a8f77a-a34b-4e04-b508-fc0fb8e7ede7\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.355966 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtscb\" (UniqueName: \"kubernetes.io/projected/d9a167f4-4f3c-44d9-9e18-7fdf79273d12-kube-api-access-mtscb\") pod \"test-operator-controller-manager-ffcdd6c94-fcfzv\" (UID: \"d9a167f4-4f3c-44d9-9e18-7fdf79273d12\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.356032 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.379218 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-547nc\" (UniqueName: \"kubernetes.io/projected/61a8f77a-a34b-4e04-b508-fc0fb8e7ede7-kube-api-access-547nc\") pod \"watcher-operator-controller-manager-646675d848-t5zxf\" (UID: \"61a8f77a-a34b-4e04-b508-fc0fb8e7ede7\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.404774 4689 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.410535 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtscb\" (UniqueName: \"kubernetes.io/projected/d9a167f4-4f3c-44d9-9e18-7fdf79273d12-kube-api-access-mtscb\") pod \"test-operator-controller-manager-ffcdd6c94-fcfzv\" (UID: \"d9a167f4-4f3c-44d9-9e18-7fdf79273d12\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.414206 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.419686 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.430858 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-2g6kt" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.437091 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.454324 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw"] Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.457126 4689 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.457249 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert podName:654be83b-acf2-4c39-b753-8f4cc7750052 nodeName:}" failed. No retries permitted until 2025-10-13 21:24:28.957212053 +0000 UTC m=+785.875457128 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert") pod "openstack-operator-controller-manager-7fcd588594-tnfjj" (UID: "654be83b-acf2-4c39-b753-8f4cc7750052") : secret "webhook-server-cert" not found Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.456977 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.459350 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.459445 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghh46\" (UniqueName: \"kubernetes.io/projected/654be83b-acf2-4c39-b753-8f4cc7750052-kube-api-access-ghh46\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.459516 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svvr7\" (UniqueName: \"kubernetes.io/projected/04a373fa-1962-4bdc-8e26-53d557df6be3-kube-api-access-svvr7\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl\" (UID: \"04a373fa-1962-4bdc-8e26-53d557df6be3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.459653 4689 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.459735 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert podName:599721f1-ec3e-4a83-b769-db5440b2f260 nodeName:}" failed. No retries permitted until 2025-10-13 21:24:29.459717672 +0000 UTC m=+786.377962757 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert") pod "openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" (UID: "599721f1-ec3e-4a83-b769-db5440b2f260") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.481891 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.505676 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghh46\" (UniqueName: \"kubernetes.io/projected/654be83b-acf2-4c39-b753-8f4cc7750052-kube-api-access-ghh46\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.560391 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svvr7\" (UniqueName: \"kubernetes.io/projected/04a373fa-1962-4bdc-8e26-53d557df6be3-kube-api-access-svvr7\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl\" (UID: \"04a373fa-1962-4bdc-8e26-53d557df6be3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.568657 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.590148 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svvr7\" (UniqueName: \"kubernetes.io/projected/04a373fa-1962-4bdc-8e26-53d557df6be3-kube-api-access-svvr7\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl\" (UID: \"04a373fa-1962-4bdc-8e26-53d557df6be3\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.633667 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4"] Oct 13 21:24:28 crc kubenswrapper[4689]: W1013 21:24:28.650784 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod574e8237_5b30_4af8_b93f_449d9ec98793.slice/crio-d45aeebe4a9d8c67ca5012d7df47c65c80045c635dc93b3e522793bc216a4f0c WatchSource:0}: Error finding container d45aeebe4a9d8c67ca5012d7df47c65c80045c635dc93b3e522793bc216a4f0c: Status 404 returned error can't find the container with id d45aeebe4a9d8c67ca5012d7df47c65c80045c635dc93b3e522793bc216a4f0c Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.663199 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l"] Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.815795 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" Oct 13 21:24:28 crc kubenswrapper[4689]: I1013 21:24:28.966632 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.966864 4689 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 13 21:24:28 crc kubenswrapper[4689]: E1013 21:24:28.966956 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert podName:654be83b-acf2-4c39-b753-8f4cc7750052 nodeName:}" failed. No retries permitted until 2025-10-13 21:24:29.966927795 +0000 UTC m=+786.885172930 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert") pod "openstack-operator-controller-manager-7fcd588594-tnfjj" (UID: "654be83b-acf2-4c39-b753-8f4cc7750052") : secret "webhook-server-cert" not found Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.026358 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.038658 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.061777 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.068306 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.074363 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ff2d1098-a378-4314-8662-1dfb98c56aae-cert\") pod \"infra-operator-controller-manager-585fc5b659-v8wsd\" (UID: \"ff2d1098-a378-4314-8662-1dfb98c56aae\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.167449 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.174538 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.191983 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" event={"ID":"de26ce24-8f8f-42e6-bd80-5331eb11f6b1","Type":"ContainerStarted","Data":"30f59db8c017e6957bcf894a63bf3fa5319536c5e175ff711a5d675a4809f7f0"} Oct 13 21:24:29 crc 
kubenswrapper[4689]: I1013 21:24:29.193015 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" event={"ID":"5f5620d8-6856-4b27-b74a-208edc1ec0d7","Type":"ContainerStarted","Data":"7a0470f0108a2a9295f1516f6af0b302e4210ea7e1d4b95ea17e9b39e7607ecf"} Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.194346 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" event={"ID":"4e3b3f49-bb44-4375-9bab-527a5e0e57a5","Type":"ContainerStarted","Data":"113da2392935a7112302006b70e17a2f990fec7a89f7c9d00e324569b0ed01b4"} Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.195913 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.196345 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" event={"ID":"97a56885-e550-415b-95be-3f61e0ac38e5","Type":"ContainerStarted","Data":"24d893bd3d96d31b0c93bdc37c2b8885f46c25db308f9607bc64b6c68aa87729"} Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.197723 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" event={"ID":"574e8237-5b30-4af8-b93f-449d9ec98793","Type":"ContainerStarted","Data":"d45aeebe4a9d8c67ca5012d7df47c65c80045c635dc93b3e522793bc216a4f0c"} Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.199045 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" event={"ID":"2ed371e4-bae8-4320-9b6b-e28103137aee","Type":"ContainerStarted","Data":"d3f563f317cb08f19f5e8e9ddb4d66b61e7302f5e32886f08172fb8da51fdd09"} Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.199953 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" event={"ID":"980922e5-08ec-418a-b207-f463195cc6da","Type":"ContainerStarted","Data":"2151e5bd323c1be985696daacf981d8ab08c779e6a6ebc2963e87cc323c2252d"} Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.200838 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" event={"ID":"3bca3670-4880-4598-abbd-8ed51e351c5a","Type":"ContainerStarted","Data":"52e1320cd92a4f46195fc965da18a5e0a87670102da000d35642d5a75eee0477"} Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.474326 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.480103 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/599721f1-ec3e-4a83-b769-db5440b2f260-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7\" (UID: \"599721f1-ec3e-4a83-b769-db5440b2f260\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:29 crc 
kubenswrapper[4689]: I1013 21:24:29.542610 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.552361 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d6jvb"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.558495 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.593260 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d6jvb"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.598376 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvczk\" (UniqueName: \"kubernetes.io/projected/d7ed244e-4bce-4da9-b87c-27abaafbc934-kube-api-access-vvczk\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.598504 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-utilities\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.598548 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-catalog-content\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.602960 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.616355 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.661146 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.683943 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-v664m"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.684363 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.705774 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.705825 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.706823 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-utilities\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.706852 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-catalog-content\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.706902 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvczk\" (UniqueName: \"kubernetes.io/projected/d7ed244e-4bce-4da9-b87c-27abaafbc934-kube-api-access-vvczk\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.707635 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-utilities\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.710877 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-catalog-content\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.712404 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.716408 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.736949 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvczk\" (UniqueName: \"kubernetes.io/projected/d7ed244e-4bce-4da9-b87c-27abaafbc934-kube-api-access-vvczk\") pod \"redhat-marketplace-d6jvb\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:29 crc kubenswrapper[4689]: E1013 21:24:29.764856 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tbgg7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-797d478b46-jlgcf_openstack-operators(5dc35208-04aa-4df8-af17-6ce8ad80199f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.857811 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.905767 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.905808 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn"] Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.912819 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd"] Oct 13 21:24:29 crc kubenswrapper[4689]: W1013 21:24:29.914512 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04a373fa_1962_4bdc_8e26_53d557df6be3.slice/crio-9d53c706e3affe99779912284a0e3a336cddda596023b979bcd2f4996ff426af WatchSource:0}: Error finding container 9d53c706e3affe99779912284a0e3a336cddda596023b979bcd2f4996ff426af: Status 404 returned error can't find the container with id 9d53c706e3affe99779912284a0e3a336cddda596023b979bcd2f4996ff426af Oct 13 21:24:29 crc kubenswrapper[4689]: E1013 21:24:29.916401 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-svvr7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl_openstack-operators(04a373fa-1962-4bdc-8e26-53d557df6be3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 13 21:24:29 crc kubenswrapper[4689]: E1013 21:24:29.917749 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" podUID="04a373fa-1962-4bdc-8e26-53d557df6be3" Oct 13 21:24:29 crc kubenswrapper[4689]: E1013 21:24:29.990607 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hgn92,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-585fc5b659-v8wsd_openstack-operators(ff2d1098-a378-4314-8662-1dfb98c56aae): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 13 21:24:29 crc kubenswrapper[4689]: E1013 21:24:29.990754 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:abe978f8da75223de5043cca50278ad4e28c8dd309883f502fe1e7a9998733b0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k29l8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-578874c84d-n52xn_openstack-operators(6c0d5f43-6334-41be-bb4f-9d538d40004a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 13 21:24:29 crc kubenswrapper[4689]: I1013 21:24:29.994442 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.013765 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.023407 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/654be83b-acf2-4c39-b753-8f4cc7750052-cert\") pod \"openstack-operator-controller-manager-7fcd588594-tnfjj\" (UID: \"654be83b-acf2-4c39-b753-8f4cc7750052\") " pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.163973 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7"] Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.284097 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" event={"ID":"87d04908-37f4-42ab-8328-893b4e255767","Type":"ContainerStarted","Data":"8a3a0b7606f25dba04062adcc9a73f8fe7f5ad6a92c8d9741fbd9d0da1832914"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.287836 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.314459 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" event={"ID":"f3d70a25-802f-4d17-a250-3b76584ff7dc","Type":"ContainerStarted","Data":"9b2f199ebd68e3b7767d99be3d9b6ce64a98618a3051190e3f0e4a34ee05a6d9"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.327363 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" event={"ID":"261f1cfd-d8a7-4dea-baa7-3feb8f67813a","Type":"ContainerStarted","Data":"002fad90bcabde4bb0c4dd01463f7d5c6f092add28fec4cdbd4d5c0944a3ac67"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.350694 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" event={"ID":"bc1916e6-51d4-4ca9-b8a2-8be1659426a2","Type":"ContainerStarted","Data":"770c8a52e8d1e6303f6bb157f5370f3d52df9093cebd8ab936de12a847ebcf1d"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.369975 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" event={"ID":"3c9cfbce-22ae-4c0d-9b73-513bf285b4a0","Type":"ContainerStarted","Data":"bd8996b628a2f2b3765f3e8c365b69e137fde995614d903b5cc54d1282fe1e34"} Oct 13 21:24:30 crc kubenswrapper[4689]: E1013 21:24:30.409444 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" podUID="5dc35208-04aa-4df8-af17-6ce8ad80199f" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.412639 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" event={"ID":"04a373fa-1962-4bdc-8e26-53d557df6be3","Type":"ContainerStarted","Data":"9d53c706e3affe99779912284a0e3a336cddda596023b979bcd2f4996ff426af"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.417175 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" event={"ID":"d34fac28-ebdd-4c77-ad9d-995611ee01d4","Type":"ContainerStarted","Data":"a118744bdd758433b76a6c9005c7e89bce088c63a1ddac249eca2eca5410729f"} Oct 13 21:24:30 crc kubenswrapper[4689]: E1013 21:24:30.422429 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" podUID="04a373fa-1962-4bdc-8e26-53d557df6be3" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.433192 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" event={"ID":"86e5e806-711e-4a41-9c65-0b121d0228e6","Type":"ContainerStarted","Data":"6e10b23d6a15b0664d802c9e86ecd7aa524fa93e6b018c77154d3af4e53443a7"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.436185 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" 
event={"ID":"ff2d1098-a378-4314-8662-1dfb98c56aae","Type":"ContainerStarted","Data":"a22ff8b47535972987f550d808c09b547b944b3f3707ff5a0df8373bd38edf09"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.438923 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" event={"ID":"6c0d5f43-6334-41be-bb4f-9d538d40004a","Type":"ContainerStarted","Data":"0608157cbb36ff1f9857c84fcc5f8af1cb9851aad9a32047880dff358f48bbc7"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.444116 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" event={"ID":"5dc35208-04aa-4df8-af17-6ce8ad80199f","Type":"ContainerStarted","Data":"75ebcb6c4fc75aaeb51f0d635c220c9446a52c3840fbcafe72e9bc68475df4f8"} Oct 13 21:24:30 crc kubenswrapper[4689]: E1013 21:24:30.459293 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" podUID="5dc35208-04aa-4df8-af17-6ce8ad80199f" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.475106 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" event={"ID":"d9a167f4-4f3c-44d9-9e18-7fdf79273d12","Type":"ContainerStarted","Data":"80d7e9b071b3cb161c547a7a4e81310feb07f05a3c5e74a254695773111b570b"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.480656 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" event={"ID":"61a8f77a-a34b-4e04-b508-fc0fb8e7ede7","Type":"ContainerStarted","Data":"da87afdec5e3a2b1ca36fe2c5cf78d2857d0f301e6ba64756201681c49a45233"} Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.539444 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d6jvb"] Oct 13 21:24:30 crc kubenswrapper[4689]: E1013 21:24:30.540174 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" podUID="6c0d5f43-6334-41be-bb4f-9d538d40004a" Oct 13 21:24:30 crc kubenswrapper[4689]: E1013 21:24:30.554508 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" podUID="ff2d1098-a378-4314-8662-1dfb98c56aae" Oct 13 21:24:30 crc kubenswrapper[4689]: I1013 21:24:30.903376 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj"] Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.500100 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" event={"ID":"654be83b-acf2-4c39-b753-8f4cc7750052","Type":"ContainerStarted","Data":"69cc185abbd82779d19c91ee3440999e8971ec48cd12629f2dde371bb9395303"} Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.500179 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" event={"ID":"654be83b-acf2-4c39-b753-8f4cc7750052","Type":"ContainerStarted","Data":"8100ea1d4aff5d72624edcbfe76d1bdadd79c8607035bf802815e7e455da20bc"} Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.514135 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" event={"ID":"599721f1-ec3e-4a83-b769-db5440b2f260","Type":"ContainerStarted","Data":"927a530d522dc82e04a74e26b82899274403752e57c7d489f396dc30f7c65700"} Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.538094 4689 generic.go:334] "Generic (PLEG): container finished" podID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerID="dfe59ffe3aa34df93c9965de5599ba0bbb72b0e619e63cd7355737a1e3eab3de" exitCode=0 Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.538188 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d6jvb" event={"ID":"d7ed244e-4bce-4da9-b87c-27abaafbc934","Type":"ContainerDied","Data":"dfe59ffe3aa34df93c9965de5599ba0bbb72b0e619e63cd7355737a1e3eab3de"} Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.538223 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d6jvb" event={"ID":"d7ed244e-4bce-4da9-b87c-27abaafbc934","Type":"ContainerStarted","Data":"65c4f417c5694ac106936006108c6eec06a7a13e291bf66c73903fd209f97e45"} Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.566881 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" event={"ID":"ff2d1098-a378-4314-8662-1dfb98c56aae","Type":"ContainerStarted","Data":"2dc2ccfed62e4de0dd2ca393059008463b02adf2c955b58a64bbddc8a9886b8b"} Oct 13 21:24:31 crc kubenswrapper[4689]: E1013 21:24:31.571134 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" podUID="ff2d1098-a378-4314-8662-1dfb98c56aae" Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.574954 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" event={"ID":"6c0d5f43-6334-41be-bb4f-9d538d40004a","Type":"ContainerStarted","Data":"1c3273beb17e53cdbc0afb4e6ebca8ca746ed2b2ad4b394f5aea8c847df78a88"} Oct 13 21:24:31 crc kubenswrapper[4689]: E1013 21:24:31.576674 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:abe978f8da75223de5043cca50278ad4e28c8dd309883f502fe1e7a9998733b0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" podUID="6c0d5f43-6334-41be-bb4f-9d538d40004a" Oct 13 21:24:31 crc kubenswrapper[4689]: I1013 21:24:31.598375 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" event={"ID":"5dc35208-04aa-4df8-af17-6ce8ad80199f","Type":"ContainerStarted","Data":"a9677e8f57d1ca75207663349f828fa79bc77923a491d904921c807fd279f11c"} Oct 13 21:24:31 crc kubenswrapper[4689]: E1013 21:24:31.611958 4689 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" podUID="04a373fa-1962-4bdc-8e26-53d557df6be3" Oct 13 21:24:31 crc kubenswrapper[4689]: E1013 21:24:31.612440 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" podUID="5dc35208-04aa-4df8-af17-6ce8ad80199f" Oct 13 21:24:32 crc kubenswrapper[4689]: I1013 21:24:32.625296 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" event={"ID":"654be83b-acf2-4c39-b753-8f4cc7750052","Type":"ContainerStarted","Data":"8d37bdf3535c1a592d265c2d8aa053f07efd5683d063fce36044d0708b91936a"} Oct 13 21:24:32 crc kubenswrapper[4689]: E1013 21:24:32.630049 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:abe978f8da75223de5043cca50278ad4e28c8dd309883f502fe1e7a9998733b0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" podUID="6c0d5f43-6334-41be-bb4f-9d538d40004a" Oct 13 21:24:32 crc kubenswrapper[4689]: E1013 21:24:32.630112 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" podUID="5dc35208-04aa-4df8-af17-6ce8ad80199f" Oct 13 21:24:32 crc kubenswrapper[4689]: E1013 21:24:32.630187 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" podUID="ff2d1098-a378-4314-8662-1dfb98c56aae" Oct 13 21:24:32 crc kubenswrapper[4689]: I1013 21:24:32.711241 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" podStartSLOduration=5.711221851 podStartE2EDuration="5.711221851s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:24:32.709723566 +0000 UTC m=+789.627968651" watchObservedRunningTime="2025-10-13 21:24:32.711221851 +0000 UTC m=+789.629466936" Oct 13 21:24:33 crc kubenswrapper[4689]: I1013 21:24:33.635362 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:34 crc kubenswrapper[4689]: I1013 21:24:34.647820 4689 
generic.go:334] "Generic (PLEG): container finished" podID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerID="ba815326d04561957cc286379170479e6b8986e6b593f3a86d453d21efadc3b3" exitCode=0 Oct 13 21:24:34 crc kubenswrapper[4689]: I1013 21:24:34.649085 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d6jvb" event={"ID":"d7ed244e-4bce-4da9-b87c-27abaafbc934","Type":"ContainerDied","Data":"ba815326d04561957cc286379170479e6b8986e6b593f3a86d453d21efadc3b3"} Oct 13 21:24:40 crc kubenswrapper[4689]: I1013 21:24:40.297640 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7fcd588594-tnfjj" Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.732526 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" event={"ID":"61a8f77a-a34b-4e04-b508-fc0fb8e7ede7","Type":"ContainerStarted","Data":"c41d10c65df2b26d4aa1751e455336028fc93020639735dc250320acc92df238"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.766002 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" event={"ID":"574e8237-5b30-4af8-b93f-449d9ec98793","Type":"ContainerStarted","Data":"0bb8f0d4d7f88b23854db739464aa616dffbc0ec5ee1d14ef07040e1b6e4890f"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.785218 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" event={"ID":"f3d70a25-802f-4d17-a250-3b76584ff7dc","Type":"ContainerStarted","Data":"3e7ef34fd596ee0f7cf000e3a3717658485ea749334f4e3957fa14b68d463deb"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.828700 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" event={"ID":"2ed371e4-bae8-4320-9b6b-e28103137aee","Type":"ContainerStarted","Data":"963b134113e9ea4055eceb1b0ec2c86dfaced9fab5683b3ba41a9aefe6ad3809"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.842954 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" event={"ID":"de26ce24-8f8f-42e6-bd80-5331eb11f6b1","Type":"ContainerStarted","Data":"74099adcea3bb0a3c9a65a396940bd31e15e2f8d0a3f2dab59b4372333fa24b4"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.859188 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" event={"ID":"97a56885-e550-415b-95be-3f61e0ac38e5","Type":"ContainerStarted","Data":"29b368dfd26a0f6c73244c256ea9b5819f8fc633339aae527d0a37f942fb2db9"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.933157 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" event={"ID":"980922e5-08ec-418a-b207-f463195cc6da","Type":"ContainerStarted","Data":"bb0eae6da0d0f8a16db1e238b0b1f431cfab9b9f930557e362318ba7568526fa"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.933189 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" event={"ID":"5f5620d8-6856-4b27-b74a-208edc1ec0d7","Type":"ContainerStarted","Data":"583c20335f23d0926a793893197a4c0773a4944adb0ca9df47fb8a18de4bba5f"} Oct 13 21:24:43 crc kubenswrapper[4689]: 
I1013 21:24:43.933199 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" event={"ID":"5f5620d8-6856-4b27-b74a-208edc1ec0d7","Type":"ContainerStarted","Data":"4f3b7ac334695918b4775c4309533f015699653137b4f05d8834d2017165071d"} Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.933751 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.986818 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" podStartSLOduration=2.848232975 podStartE2EDuration="16.986792701s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:28.478052352 +0000 UTC m=+785.396297437" lastFinishedPulling="2025-10-13 21:24:42.616612078 +0000 UTC m=+799.534857163" observedRunningTime="2025-10-13 21:24:43.976311704 +0000 UTC m=+800.894556789" watchObservedRunningTime="2025-10-13 21:24:43.986792701 +0000 UTC m=+800.905037786" Oct 13 21:24:43 crc kubenswrapper[4689]: I1013 21:24:43.994798 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" event={"ID":"86e5e806-711e-4a41-9c65-0b121d0228e6","Type":"ContainerStarted","Data":"e6749e97e756176e127bacd6fb8a08e9325a56a9fd243ecd27ec7232e355636a"} Oct 13 21:24:44 crc kubenswrapper[4689]: I1013 21:24:44.022000 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" event={"ID":"599721f1-ec3e-4a83-b769-db5440b2f260","Type":"ContainerStarted","Data":"0f41987deaba48e9fec091beae0c8fed70cb97b01195e52de8fb91693a7924a3"} Oct 13 21:24:44 crc kubenswrapper[4689]: I1013 21:24:44.042117 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" event={"ID":"261f1cfd-d8a7-4dea-baa7-3feb8f67813a","Type":"ContainerStarted","Data":"26d31f93231e5f9a9d8b7500fec5a4b2596eaa35d6f5bafad73ee62d6bcb3c30"} Oct 13 21:24:44 crc kubenswrapper[4689]: I1013 21:24:44.048496 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" event={"ID":"3c9cfbce-22ae-4c0d-9b73-513bf285b4a0","Type":"ContainerStarted","Data":"46609f435040335deab47bc5b582e8390346672232a6d4e5dacebb73f1970189"} Oct 13 21:24:44 crc kubenswrapper[4689]: I1013 21:24:44.063488 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" event={"ID":"d34fac28-ebdd-4c77-ad9d-995611ee01d4","Type":"ContainerStarted","Data":"1a1099d4e94a28eb1bf55f5cb2e662490dd127c1fd411f23c33270d92181a492"} Oct 13 21:24:44 crc kubenswrapper[4689]: I1013 21:24:44.102532 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d6jvb" event={"ID":"d7ed244e-4bce-4da9-b87c-27abaafbc934","Type":"ContainerStarted","Data":"835d5edfb8ef46446742b465584238dc18dfab141e7bdc04f1abc54a06e6cacf"} Oct 13 21:24:44 crc kubenswrapper[4689]: I1013 21:24:44.115435 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" 
event={"ID":"3bca3670-4880-4598-abbd-8ed51e351c5a","Type":"ContainerStarted","Data":"bb5eee57eab534e67812633188ae2423d2f2587043599d39634ad3a966f82716"} Oct 13 21:24:44 crc kubenswrapper[4689]: I1013 21:24:44.141598 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d6jvb" podStartSLOduration=4.075017705 podStartE2EDuration="15.141556015s" podCreationTimestamp="2025-10-13 21:24:29 +0000 UTC" firstStartedPulling="2025-10-13 21:24:31.550124029 +0000 UTC m=+788.468369114" lastFinishedPulling="2025-10-13 21:24:42.616662329 +0000 UTC m=+799.534907424" observedRunningTime="2025-10-13 21:24:44.126259416 +0000 UTC m=+801.044504501" watchObservedRunningTime="2025-10-13 21:24:44.141556015 +0000 UTC m=+801.059801100" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.123664 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" event={"ID":"97a56885-e550-415b-95be-3f61e0ac38e5","Type":"ContainerStarted","Data":"e2e4cd5f8b49d9fb51332c1faf23f83293eac97330735c0a444e97e2f332b475"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.123997 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.129505 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" event={"ID":"3bca3670-4880-4598-abbd-8ed51e351c5a","Type":"ContainerStarted","Data":"f02ce6c5595d0fd617f1f9b8773cd5a4b210d65e68a34ad880404970b8571de1"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.129635 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.131525 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" event={"ID":"bc1916e6-51d4-4ca9-b8a2-8be1659426a2","Type":"ContainerStarted","Data":"da4c20adb52f6ca22efefc3ae67e6898841a2ba80eaee4b79b2804beff2cd4b1"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.131572 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" event={"ID":"bc1916e6-51d4-4ca9-b8a2-8be1659426a2","Type":"ContainerStarted","Data":"ffb2ed889b7c08f5675a8e3e5e8bfbde03b6756af2d7254f2d7ad460dc3b753e"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.131709 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.134083 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" event={"ID":"3c9cfbce-22ae-4c0d-9b73-513bf285b4a0","Type":"ContainerStarted","Data":"234a6d1aa350f16eb9ab9eb2aae7ac3008bb618192d0cf1b74fe598b9196ab71"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.134485 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.136775 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" event={"ID":"4e3b3f49-bb44-4375-9bab-527a5e0e57a5","Type":"ContainerStarted","Data":"ef33916b1db2d9cf74260811c661abdf071d70913f899801453c2dcfe4fbf472"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.136800 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" event={"ID":"4e3b3f49-bb44-4375-9bab-527a5e0e57a5","Type":"ContainerStarted","Data":"eedd41d60d99fbf9390ffe84ba9aec559eeea026e9b729d59e771c79545b77af"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.137129 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.138549 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" event={"ID":"87d04908-37f4-42ab-8328-893b4e255767","Type":"ContainerStarted","Data":"499253f1b10681ae9c15504097905c2fb06164020cfd0f2ea1d13f9139bc72d0"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.138576 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" event={"ID":"87d04908-37f4-42ab-8328-893b4e255767","Type":"ContainerStarted","Data":"eabe03d931762f40dc9a9a15dddafa2081ed695a32a37358d6562d846bb7effd"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.138946 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.140896 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" event={"ID":"86e5e806-711e-4a41-9c65-0b121d0228e6","Type":"ContainerStarted","Data":"ee0248d9dde021f280840d79dd49af18c32118f022ac83383f27e0c18ff209b2"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.141240 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.144756 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" event={"ID":"d9a167f4-4f3c-44d9-9e18-7fdf79273d12","Type":"ContainerStarted","Data":"e984b2624d829251c875e69c733af9826b9ee0ff26c53908e260b83e07ad944b"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.144781 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" event={"ID":"d9a167f4-4f3c-44d9-9e18-7fdf79273d12","Type":"ContainerStarted","Data":"78c9bbdf5dc8f640c9d9ab9a38b66b9d5ae070bab12d0598575f94fc5c5854cf"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.145140 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.147539 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" event={"ID":"2ed371e4-bae8-4320-9b6b-e28103137aee","Type":"ContainerStarted","Data":"b7d191946e89ab60ead7ae51a7ba048c88439f14d2524d85b153d99cfe35dcf3"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 
21:24:45.147927 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.148845 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" podStartSLOduration=4.593404826 podStartE2EDuration="18.148829195s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.065397048 +0000 UTC m=+785.983642133" lastFinishedPulling="2025-10-13 21:24:42.620821417 +0000 UTC m=+799.539066502" observedRunningTime="2025-10-13 21:24:45.144430772 +0000 UTC m=+802.062675857" watchObservedRunningTime="2025-10-13 21:24:45.148829195 +0000 UTC m=+802.067074280" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.150772 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" event={"ID":"980922e5-08ec-418a-b207-f463195cc6da","Type":"ContainerStarted","Data":"d55eb27c21d77fe14652a81fffe69d678fe36816118352da5fc31801d3e44563"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.151127 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.153188 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" event={"ID":"61a8f77a-a34b-4e04-b508-fc0fb8e7ede7","Type":"ContainerStarted","Data":"d358ec3de5857385c513bac3ced08d9c8430b693f13d37f9eafadde8fd367ac0"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.153262 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.154880 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" event={"ID":"574e8237-5b30-4af8-b93f-449d9ec98793","Type":"ContainerStarted","Data":"62cd8116119fffbe2a74cd04acac60b2fa7931e7d085660e960f8f1137d5876e"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.155237 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.159365 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" event={"ID":"de26ce24-8f8f-42e6-bd80-5331eb11f6b1","Type":"ContainerStarted","Data":"5d05f6a5a2d83c27c4b4bcb681a4ea4233697a97c4401b809eafa1b9fa33afae"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.159767 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.160872 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" podStartSLOduration=4.54462775 podStartE2EDuration="18.160863487s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.040306379 +0000 UTC m=+785.958551464" lastFinishedPulling="2025-10-13 21:24:42.656542106 +0000 UTC m=+799.574787201" 
observedRunningTime="2025-10-13 21:24:45.159960186 +0000 UTC m=+802.078205271" watchObservedRunningTime="2025-10-13 21:24:45.160863487 +0000 UTC m=+802.079108572" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.167279 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" event={"ID":"599721f1-ec3e-4a83-b769-db5440b2f260","Type":"ContainerStarted","Data":"687a54d7b93febb4fb31409b75cba231d8cb57326235778177bd616f42a50a84"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.168039 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.170251 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" event={"ID":"d34fac28-ebdd-4c77-ad9d-995611ee01d4","Type":"ContainerStarted","Data":"52e3df6035d99ef6ff299c95f7178db9da5ee618d2aa9e6d207a819a028294c7"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.170351 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.172392 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" event={"ID":"f3d70a25-802f-4d17-a250-3b76584ff7dc","Type":"ContainerStarted","Data":"4dd0ff1056151eb579491e2a90436d28a7968155203884cc4270454d79208562"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.172833 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.181084 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" event={"ID":"261f1cfd-d8a7-4dea-baa7-3feb8f67813a","Type":"ContainerStarted","Data":"303fc9d9432ef3edbe575ceb6c6c102aa9d370eb41dfbff841d87a6bcbacd585"} Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.181125 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.267763 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" podStartSLOduration=5.290296644 podStartE2EDuration="18.267748898s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.728443291 +0000 UTC m=+786.646688376" lastFinishedPulling="2025-10-13 21:24:42.705895555 +0000 UTC m=+799.624140630" observedRunningTime="2025-10-13 21:24:45.265185688 +0000 UTC m=+802.183430763" watchObservedRunningTime="2025-10-13 21:24:45.267748898 +0000 UTC m=+802.185993983" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.269799 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" podStartSLOduration=4.305131125 podStartE2EDuration="18.269790636s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:28.703485117 +0000 UTC m=+785.621730202" lastFinishedPulling="2025-10-13 21:24:42.668144628 +0000 UTC 
m=+799.586389713" observedRunningTime="2025-10-13 21:24:45.178529172 +0000 UTC m=+802.096774257" watchObservedRunningTime="2025-10-13 21:24:45.269790636 +0000 UTC m=+802.188035731" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.355158 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" podStartSLOduration=5.392374042 podStartE2EDuration="18.35514261s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.705620726 +0000 UTC m=+786.623865811" lastFinishedPulling="2025-10-13 21:24:42.668389294 +0000 UTC m=+799.586634379" observedRunningTime="2025-10-13 21:24:45.341461979 +0000 UTC m=+802.259707064" watchObservedRunningTime="2025-10-13 21:24:45.35514261 +0000 UTC m=+802.273387695" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.358285 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" podStartSLOduration=5.27949075 podStartE2EDuration="18.358275534s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.642996684 +0000 UTC m=+786.561241769" lastFinishedPulling="2025-10-13 21:24:42.721781468 +0000 UTC m=+799.640026553" observedRunningTime="2025-10-13 21:24:45.311103236 +0000 UTC m=+802.229348321" watchObservedRunningTime="2025-10-13 21:24:45.358275534 +0000 UTC m=+802.276520619" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.392202 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" podStartSLOduration=5.419486639 podStartE2EDuration="18.392176011s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.733537901 +0000 UTC m=+786.651782986" lastFinishedPulling="2025-10-13 21:24:42.706227273 +0000 UTC m=+799.624472358" observedRunningTime="2025-10-13 21:24:45.383560138 +0000 UTC m=+802.301805223" watchObservedRunningTime="2025-10-13 21:24:45.392176011 +0000 UTC m=+802.310421096" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.433556 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" podStartSLOduration=5.448528431 podStartE2EDuration="18.433540972s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.734206687 +0000 UTC m=+786.652451772" lastFinishedPulling="2025-10-13 21:24:42.719219228 +0000 UTC m=+799.637464313" observedRunningTime="2025-10-13 21:24:45.427961981 +0000 UTC m=+802.346207066" watchObservedRunningTime="2025-10-13 21:24:45.433540972 +0000 UTC m=+802.351786057" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.465470 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" podStartSLOduration=4.943392565 podStartE2EDuration="18.465449521s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.180707146 +0000 UTC m=+786.098952231" lastFinishedPulling="2025-10-13 21:24:42.702764102 +0000 UTC m=+799.621009187" observedRunningTime="2025-10-13 21:24:45.461469458 +0000 UTC m=+802.379714553" watchObservedRunningTime="2025-10-13 21:24:45.465449521 +0000 UTC m=+802.383694596" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.517358 4689 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" podStartSLOduration=5.63028427 podStartE2EDuration="18.51733629s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.728894003 +0000 UTC m=+786.647139088" lastFinishedPulling="2025-10-13 21:24:42.615946023 +0000 UTC m=+799.534191108" observedRunningTime="2025-10-13 21:24:45.502627384 +0000 UTC m=+802.420872469" watchObservedRunningTime="2025-10-13 21:24:45.51733629 +0000 UTC m=+802.435581375" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.560568 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" podStartSLOduration=5.568277714 podStartE2EDuration="18.560551025s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.628957725 +0000 UTC m=+786.547202810" lastFinishedPulling="2025-10-13 21:24:42.621230996 +0000 UTC m=+799.539476121" observedRunningTime="2025-10-13 21:24:45.550998671 +0000 UTC m=+802.469243746" watchObservedRunningTime="2025-10-13 21:24:45.560551025 +0000 UTC m=+802.478796110" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.622918 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" podStartSLOduration=6.2569014880000005 podStartE2EDuration="18.622898349s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:30.282861954 +0000 UTC m=+787.201107039" lastFinishedPulling="2025-10-13 21:24:42.648858785 +0000 UTC m=+799.567103900" observedRunningTime="2025-10-13 21:24:45.593190952 +0000 UTC m=+802.511436037" watchObservedRunningTime="2025-10-13 21:24:45.622898349 +0000 UTC m=+802.541143434" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.623056 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" podStartSLOduration=4.997104648 podStartE2EDuration="18.623051693s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.075607428 +0000 UTC m=+785.993852513" lastFinishedPulling="2025-10-13 21:24:42.701554473 +0000 UTC m=+799.619799558" observedRunningTime="2025-10-13 21:24:45.619769706 +0000 UTC m=+802.538014791" watchObservedRunningTime="2025-10-13 21:24:45.623051693 +0000 UTC m=+802.541296778" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.646524 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" podStartSLOduration=5.119892372 podStartE2EDuration="18.646504324s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.18040993 +0000 UTC m=+786.098655015" lastFinishedPulling="2025-10-13 21:24:42.707021882 +0000 UTC m=+799.625266967" observedRunningTime="2025-10-13 21:24:45.645222933 +0000 UTC m=+802.563468018" watchObservedRunningTime="2025-10-13 21:24:45.646504324 +0000 UTC m=+802.564749409" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.664139 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" podStartSLOduration=5.91936859 podStartE2EDuration="18.664120608s" podCreationTimestamp="2025-10-13 21:24:27 
+0000 UTC" firstStartedPulling="2025-10-13 21:24:29.904121608 +0000 UTC m=+786.822366693" lastFinishedPulling="2025-10-13 21:24:42.648873636 +0000 UTC m=+799.567118711" observedRunningTime="2025-10-13 21:24:45.663113654 +0000 UTC m=+802.581358739" watchObservedRunningTime="2025-10-13 21:24:45.664120608 +0000 UTC m=+802.582365693" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.680624 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" podStartSLOduration=4.665782366 podStartE2EDuration="18.680606215s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:28.653812561 +0000 UTC m=+785.572057646" lastFinishedPulling="2025-10-13 21:24:42.66863641 +0000 UTC m=+799.586881495" observedRunningTime="2025-10-13 21:24:45.678489505 +0000 UTC m=+802.596734600" watchObservedRunningTime="2025-10-13 21:24:45.680606215 +0000 UTC m=+802.598851300" Oct 13 21:24:45 crc kubenswrapper[4689]: I1013 21:24:45.708827 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" podStartSLOduration=5.738780598 podStartE2EDuration="18.708808968s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.73392871 +0000 UTC m=+786.652173785" lastFinishedPulling="2025-10-13 21:24:42.70395707 +0000 UTC m=+799.622202155" observedRunningTime="2025-10-13 21:24:45.701782243 +0000 UTC m=+802.620027328" watchObservedRunningTime="2025-10-13 21:24:45.708808968 +0000 UTC m=+802.627054053" Oct 13 21:24:47 crc kubenswrapper[4689]: I1013 21:24:47.204412 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" event={"ID":"5dc35208-04aa-4df8-af17-6ce8ad80199f","Type":"ContainerStarted","Data":"10f95bb253d3ece7d8489ff7059ec606f97d05374b917fcf8ddcfe4ccf7fbfd0"} Oct 13 21:24:47 crc kubenswrapper[4689]: I1013 21:24:47.209350 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" Oct 13 21:24:47 crc kubenswrapper[4689]: I1013 21:24:47.213359 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v664m" Oct 13 21:24:47 crc kubenswrapper[4689]: I1013 21:24:47.221322 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" podStartSLOduration=3.457361233 podStartE2EDuration="20.221302943s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.764666712 +0000 UTC m=+786.682911797" lastFinishedPulling="2025-10-13 21:24:46.528608422 +0000 UTC m=+803.446853507" observedRunningTime="2025-10-13 21:24:47.221033466 +0000 UTC m=+804.139278581" watchObservedRunningTime="2025-10-13 21:24:47.221302943 +0000 UTC m=+804.139548018" Oct 13 21:24:48 crc kubenswrapper[4689]: I1013 21:24:48.071336 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-664664cb68-j6wsf" Oct 13 21:24:48 crc kubenswrapper[4689]: I1013 21:24:48.123657 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-2g8g7" Oct 13 21:24:48 crc kubenswrapper[4689]: I1013 
21:24:48.486306 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-fcfzv" Oct 13 21:24:48 crc kubenswrapper[4689]: I1013 21:24:48.571377 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-646675d848-t5zxf" Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.237745 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" event={"ID":"ff2d1098-a378-4314-8662-1dfb98c56aae","Type":"ContainerStarted","Data":"e45e02870e651eb3fef719ac13bc48384e24e89cd03a3c3566b42802b7c307b1"} Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.238524 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.243938 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" event={"ID":"04a373fa-1962-4bdc-8e26-53d557df6be3","Type":"ContainerStarted","Data":"385b8e594465da314c55694a0a1c05f09a83a4abea7c27a2dd5bc497d7be3d05"} Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.263691 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" podStartSLOduration=4.106157571 podStartE2EDuration="22.263658064s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.990450755 +0000 UTC m=+786.908695840" lastFinishedPulling="2025-10-13 21:24:48.147951248 +0000 UTC m=+805.066196333" observedRunningTime="2025-10-13 21:24:49.257239532 +0000 UTC m=+806.175484627" watchObservedRunningTime="2025-10-13 21:24:49.263658064 +0000 UTC m=+806.181903159" Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.292360 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl" podStartSLOduration=3.067249847 podStartE2EDuration="21.292320856s" podCreationTimestamp="2025-10-13 21:24:28 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.916268543 +0000 UTC m=+786.834513628" lastFinishedPulling="2025-10-13 21:24:48.141339552 +0000 UTC m=+805.059584637" observedRunningTime="2025-10-13 21:24:49.281969044 +0000 UTC m=+806.200214129" watchObservedRunningTime="2025-10-13 21:24:49.292320856 +0000 UTC m=+806.210565961" Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.553521 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7" Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.995186 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:49 crc kubenswrapper[4689]: I1013 21:24:49.995962 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:50 crc kubenswrapper[4689]: I1013 21:24:50.052989 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:50 crc kubenswrapper[4689]: I1013 21:24:50.256116 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" event={"ID":"6c0d5f43-6334-41be-bb4f-9d538d40004a","Type":"ContainerStarted","Data":"4fed224eab33d4f425231c071623e5cc9c0cf1dd0467a91c4bd52ba309ead36a"} Oct 13 21:24:50 crc kubenswrapper[4689]: I1013 21:24:50.276107 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" podStartSLOduration=4.192414387 podStartE2EDuration="23.276088443s" podCreationTimestamp="2025-10-13 21:24:27 +0000 UTC" firstStartedPulling="2025-10-13 21:24:29.990537307 +0000 UTC m=+786.908782392" lastFinishedPulling="2025-10-13 21:24:49.074211363 +0000 UTC m=+805.992456448" observedRunningTime="2025-10-13 21:24:50.27256672 +0000 UTC m=+807.190811825" watchObservedRunningTime="2025-10-13 21:24:50.276088443 +0000 UTC m=+807.194333538" Oct 13 21:24:50 crc kubenswrapper[4689]: I1013 21:24:50.348803 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:50 crc kubenswrapper[4689]: I1013 21:24:50.405033 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d6jvb"] Oct 13 21:24:52 crc kubenswrapper[4689]: I1013 21:24:52.267970 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d6jvb" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="registry-server" containerID="cri-o://835d5edfb8ef46446742b465584238dc18dfab141e7bdc04f1abc54a06e6cacf" gracePeriod=2 Oct 13 21:24:53 crc kubenswrapper[4689]: I1013 21:24:53.278800 4689 generic.go:334] "Generic (PLEG): container finished" podID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerID="835d5edfb8ef46446742b465584238dc18dfab141e7bdc04f1abc54a06e6cacf" exitCode=0 Oct 13 21:24:53 crc kubenswrapper[4689]: I1013 21:24:53.278868 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d6jvb" event={"ID":"d7ed244e-4bce-4da9-b87c-27abaafbc934","Type":"ContainerDied","Data":"835d5edfb8ef46446742b465584238dc18dfab141e7bdc04f1abc54a06e6cacf"} Oct 13 21:24:53 crc kubenswrapper[4689]: I1013 21:24:53.858977 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:24:53 crc kubenswrapper[4689]: I1013 21:24:53.859076 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.303284 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d6jvb" event={"ID":"d7ed244e-4bce-4da9-b87c-27abaafbc934","Type":"ContainerDied","Data":"65c4f417c5694ac106936006108c6eec06a7a13e291bf66c73903fd209f97e45"} Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.304104 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65c4f417c5694ac106936006108c6eec06a7a13e291bf66c73903fd209f97e45" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.352064 4689 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.550389 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvczk\" (UniqueName: \"kubernetes.io/projected/d7ed244e-4bce-4da9-b87c-27abaafbc934-kube-api-access-vvczk\") pod \"d7ed244e-4bce-4da9-b87c-27abaafbc934\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.550458 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-catalog-content\") pod \"d7ed244e-4bce-4da9-b87c-27abaafbc934\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.550498 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-utilities\") pod \"d7ed244e-4bce-4da9-b87c-27abaafbc934\" (UID: \"d7ed244e-4bce-4da9-b87c-27abaafbc934\") " Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.552418 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-utilities" (OuterVolumeSpecName: "utilities") pod "d7ed244e-4bce-4da9-b87c-27abaafbc934" (UID: "d7ed244e-4bce-4da9-b87c-27abaafbc934"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.557796 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7ed244e-4bce-4da9-b87c-27abaafbc934-kube-api-access-vvczk" (OuterVolumeSpecName: "kube-api-access-vvczk") pod "d7ed244e-4bce-4da9-b87c-27abaafbc934" (UID: "d7ed244e-4bce-4da9-b87c-27abaafbc934"). InnerVolumeSpecName "kube-api-access-vvczk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.575183 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d7ed244e-4bce-4da9-b87c-27abaafbc934" (UID: "d7ed244e-4bce-4da9-b87c-27abaafbc934"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.652095 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvczk\" (UniqueName: \"kubernetes.io/projected/d7ed244e-4bce-4da9-b87c-27abaafbc934-kube-api-access-vvczk\") on node \"crc\" DevicePath \"\"" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.652140 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:24:56 crc kubenswrapper[4689]: I1013 21:24:56.652152 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d7ed244e-4bce-4da9-b87c-27abaafbc934-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.311298 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d6jvb" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.356111 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d6jvb"] Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.363138 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d6jvb"] Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.572507 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-pvsmw" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.583088 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-w28f4" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.614256 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ltx5l" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.629315 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-zm9b4" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.644840 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-8lb6r" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.667475 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-gg4tl" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.761412 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-mrv92" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.792827 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-7zs79" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.877470 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" path="/var/lib/kubelet/pods/d7ed244e-4bce-4da9-b87c-27abaafbc934/volumes" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.916672 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-qjpss" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.931508 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-jlgcf" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.952098 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-xpqj5" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.974389 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-kd8d5" Oct 13 21:24:57 crc kubenswrapper[4689]: I1013 21:24:57.998860 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-f2kxw" Oct 13 21:24:58 crc kubenswrapper[4689]: I1013 21:24:58.407995 4689 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" Oct 13 21:24:58 crc kubenswrapper[4689]: I1013 21:24:58.410236 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-n52xn" Oct 13 21:24:59 crc kubenswrapper[4689]: I1013 21:24:59.204228 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-v8wsd" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.090366 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gdx6w"] Oct 13 21:25:08 crc kubenswrapper[4689]: E1013 21:25:08.091571 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="extract-content" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.091614 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="extract-content" Oct 13 21:25:08 crc kubenswrapper[4689]: E1013 21:25:08.091650 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="extract-utilities" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.091663 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="extract-utilities" Oct 13 21:25:08 crc kubenswrapper[4689]: E1013 21:25:08.091683 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="registry-server" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.091694 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="registry-server" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.091952 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7ed244e-4bce-4da9-b87c-27abaafbc934" containerName="registry-server" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.093921 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.099841 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gdx6w"] Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.214518 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-catalog-content\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.214581 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8g84\" (UniqueName: \"kubernetes.io/projected/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-kube-api-access-v8g84\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.214817 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-utilities\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.316462 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-catalog-content\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.316541 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8g84\" (UniqueName: \"kubernetes.io/projected/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-kube-api-access-v8g84\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.316616 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-utilities\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.317087 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-catalog-content\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.317195 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-utilities\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.338196 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v8g84\" (UniqueName: \"kubernetes.io/projected/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-kube-api-access-v8g84\") pod \"certified-operators-gdx6w\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.429006 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:08 crc kubenswrapper[4689]: W1013 21:25:08.956847 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ec9bc5b_ec92_4133_a24b_cf5c6760d39f.slice/crio-53b630d216d16d640236c38ee9c7b03376a6c60fbb416c07f4daa57721a2bd4f WatchSource:0}: Error finding container 53b630d216d16d640236c38ee9c7b03376a6c60fbb416c07f4daa57721a2bd4f: Status 404 returned error can't find the container with id 53b630d216d16d640236c38ee9c7b03376a6c60fbb416c07f4daa57721a2bd4f Oct 13 21:25:08 crc kubenswrapper[4689]: I1013 21:25:08.969260 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gdx6w"] Oct 13 21:25:09 crc kubenswrapper[4689]: I1013 21:25:09.401004 4689 generic.go:334] "Generic (PLEG): container finished" podID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerID="e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758" exitCode=0 Oct 13 21:25:09 crc kubenswrapper[4689]: I1013 21:25:09.401089 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdx6w" event={"ID":"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f","Type":"ContainerDied","Data":"e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758"} Oct 13 21:25:09 crc kubenswrapper[4689]: I1013 21:25:09.401266 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdx6w" event={"ID":"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f","Type":"ContainerStarted","Data":"53b630d216d16d640236c38ee9c7b03376a6c60fbb416c07f4daa57721a2bd4f"} Oct 13 21:25:10 crc kubenswrapper[4689]: I1013 21:25:10.411857 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdx6w" event={"ID":"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f","Type":"ContainerStarted","Data":"e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517"} Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.422505 4689 generic.go:334] "Generic (PLEG): container finished" podID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerID="e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517" exitCode=0 Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.422648 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdx6w" event={"ID":"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f","Type":"ContainerDied","Data":"e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517"} Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.848830 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bdjfj"] Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.851283 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.856478 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdjfj"] Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.974894 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvlvc\" (UniqueName: \"kubernetes.io/projected/435c993a-77b5-4e4d-b481-a1c79eed8c58-kube-api-access-xvlvc\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.974961 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-utilities\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:11 crc kubenswrapper[4689]: I1013 21:25:11.975027 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-catalog-content\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.076851 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvlvc\" (UniqueName: \"kubernetes.io/projected/435c993a-77b5-4e4d-b481-a1c79eed8c58-kube-api-access-xvlvc\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.076955 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-utilities\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.076990 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-catalog-content\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.077632 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-catalog-content\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.077802 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-utilities\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.097650 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xvlvc\" (UniqueName: \"kubernetes.io/projected/435c993a-77b5-4e4d-b481-a1c79eed8c58-kube-api-access-xvlvc\") pod \"community-operators-bdjfj\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.170490 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.435827 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdx6w" event={"ID":"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f","Type":"ContainerStarted","Data":"5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85"} Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.462581 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gdx6w" podStartSLOduration=1.934398035 podStartE2EDuration="4.462561887s" podCreationTimestamp="2025-10-13 21:25:08 +0000 UTC" firstStartedPulling="2025-10-13 21:25:09.40285177 +0000 UTC m=+826.321096855" lastFinishedPulling="2025-10-13 21:25:11.931015622 +0000 UTC m=+828.849260707" observedRunningTime="2025-10-13 21:25:12.456128725 +0000 UTC m=+829.374373810" watchObservedRunningTime="2025-10-13 21:25:12.462561887 +0000 UTC m=+829.380806972" Oct 13 21:25:12 crc kubenswrapper[4689]: I1013 21:25:12.700310 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdjfj"] Oct 13 21:25:13 crc kubenswrapper[4689]: I1013 21:25:13.444818 4689 generic.go:334] "Generic (PLEG): container finished" podID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerID="c0892ea75e1537e4dc7fd71803e0da852eb8ff55fab1e9d501957b047d5a6bfd" exitCode=0 Oct 13 21:25:13 crc kubenswrapper[4689]: I1013 21:25:13.444850 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdjfj" event={"ID":"435c993a-77b5-4e4d-b481-a1c79eed8c58","Type":"ContainerDied","Data":"c0892ea75e1537e4dc7fd71803e0da852eb8ff55fab1e9d501957b047d5a6bfd"} Oct 13 21:25:13 crc kubenswrapper[4689]: I1013 21:25:13.444886 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdjfj" event={"ID":"435c993a-77b5-4e4d-b481-a1c79eed8c58","Type":"ContainerStarted","Data":"6479f7bb4032ca381bcf2954ad3b8813e6201eb4fb9fe8c74db25d16913ea8c3"} Oct 13 21:25:15 crc kubenswrapper[4689]: I1013 21:25:15.478747 4689 generic.go:334] "Generic (PLEG): container finished" podID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerID="5cf56d9271bee60f74cb6560c1713464676fbd0ab3924d9b78f48cd88bb92ed3" exitCode=0 Oct 13 21:25:15 crc kubenswrapper[4689]: I1013 21:25:15.480694 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdjfj" event={"ID":"435c993a-77b5-4e4d-b481-a1c79eed8c58","Type":"ContainerDied","Data":"5cf56d9271bee60f74cb6560c1713464676fbd0ab3924d9b78f48cd88bb92ed3"} Oct 13 21:25:16 crc kubenswrapper[4689]: I1013 21:25:16.491013 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdjfj" event={"ID":"435c993a-77b5-4e4d-b481-a1c79eed8c58","Type":"ContainerStarted","Data":"97a5e834b82a4c10bded9c463df3796d0ab43bc3cafce6b42bb1cc835161e4d8"} Oct 13 21:25:16 crc kubenswrapper[4689]: I1013 21:25:16.509298 4689 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-marketplace/community-operators-bdjfj" podStartSLOduration=2.920777256 podStartE2EDuration="5.509277965s" podCreationTimestamp="2025-10-13 21:25:11 +0000 UTC" firstStartedPulling="2025-10-13 21:25:13.447367997 +0000 UTC m=+830.365613102" lastFinishedPulling="2025-10-13 21:25:16.035868726 +0000 UTC m=+832.954113811" observedRunningTime="2025-10-13 21:25:16.504939663 +0000 UTC m=+833.423184788" watchObservedRunningTime="2025-10-13 21:25:16.509277965 +0000 UTC m=+833.427523050" Oct 13 21:25:18 crc kubenswrapper[4689]: I1013 21:25:18.429416 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:18 crc kubenswrapper[4689]: I1013 21:25:18.429486 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:18 crc kubenswrapper[4689]: I1013 21:25:18.472664 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:18 crc kubenswrapper[4689]: I1013 21:25:18.561634 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:19 crc kubenswrapper[4689]: I1013 21:25:19.640887 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gdx6w"] Oct 13 21:25:20 crc kubenswrapper[4689]: I1013 21:25:20.523252 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gdx6w" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="registry-server" containerID="cri-o://5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85" gracePeriod=2 Oct 13 21:25:20 crc kubenswrapper[4689]: I1013 21:25:20.978429 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.120808 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-utilities\") pod \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.120929 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8g84\" (UniqueName: \"kubernetes.io/projected/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-kube-api-access-v8g84\") pod \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.120956 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-catalog-content\") pod \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\" (UID: \"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f\") " Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.122170 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-utilities" (OuterVolumeSpecName: "utilities") pod "7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" (UID: "7ec9bc5b-ec92-4133-a24b-cf5c6760d39f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.127781 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-kube-api-access-v8g84" (OuterVolumeSpecName: "kube-api-access-v8g84") pod "7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" (UID: "7ec9bc5b-ec92-4133-a24b-cf5c6760d39f"). InnerVolumeSpecName "kube-api-access-v8g84". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.223245 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8g84\" (UniqueName: \"kubernetes.io/projected/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-kube-api-access-v8g84\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.223320 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.555934 4689 generic.go:334] "Generic (PLEG): container finished" podID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerID="5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85" exitCode=0 Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.556001 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdx6w" event={"ID":"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f","Type":"ContainerDied","Data":"5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85"} Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.556037 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdx6w" event={"ID":"7ec9bc5b-ec92-4133-a24b-cf5c6760d39f","Type":"ContainerDied","Data":"53b630d216d16d640236c38ee9c7b03376a6c60fbb416c07f4daa57721a2bd4f"} Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.556060 4689 scope.go:117] "RemoveContainer" containerID="5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.556316 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gdx6w" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.556379 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2t6mt"] Oct 13 21:25:21 crc kubenswrapper[4689]: E1013 21:25:21.557044 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="registry-server" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.557070 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="registry-server" Oct 13 21:25:21 crc kubenswrapper[4689]: E1013 21:25:21.557115 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="extract-content" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.557124 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="extract-content" Oct 13 21:25:21 crc kubenswrapper[4689]: E1013 21:25:21.557153 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="extract-utilities" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.557162 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="extract-utilities" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.557383 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" containerName="registry-server" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.558418 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.567802 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.567839 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-gh2x7" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.567961 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.568080 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.578055 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2t6mt"] Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.629131 4689 scope.go:117] "RemoveContainer" containerID="e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.649910 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mwwx8"] Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.651073 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.656337 4689 scope.go:117] "RemoveContainer" containerID="e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.656541 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.693345 4689 scope.go:117] "RemoveContainer" containerID="5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85" Oct 13 21:25:21 crc kubenswrapper[4689]: E1013 21:25:21.693979 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85\": container with ID starting with 5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85 not found: ID does not exist" containerID="5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.694045 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85"} err="failed to get container status \"5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85\": rpc error: code = NotFound desc = could not find container \"5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85\": container with ID starting with 5ea9553b60df61c7fa1ee2c9d735f6ae43c5fb24a0ef4d38c317e678b834ba85 not found: ID does not exist" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.694096 4689 scope.go:117] "RemoveContainer" containerID="e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517" Oct 13 21:25:21 crc kubenswrapper[4689]: E1013 21:25:21.694991 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517\": container with ID starting with e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517 not found: ID does not exist" containerID="e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.695031 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517"} err="failed to get container status \"e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517\": rpc error: code = NotFound desc = could not find container \"e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517\": container with ID starting with e83c42a745f783e1e89132f4602018d71779ee36def40989a3cac6a7cb928517 not found: ID does not exist" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.695064 4689 scope.go:117] "RemoveContainer" containerID="e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758" Oct 13 21:25:21 crc kubenswrapper[4689]: E1013 21:25:21.695347 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758\": container with ID starting with e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758 not found: ID does not exist" containerID="e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758" Oct 13 21:25:21 crc 
kubenswrapper[4689]: I1013 21:25:21.695374 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758"} err="failed to get container status \"e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758\": rpc error: code = NotFound desc = could not find container \"e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758\": container with ID starting with e46b0d3436ce84c29cab8763cb9996c6ab0c4ac43868f950f0ad72e3fe966758 not found: ID does not exist" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.730223 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9544a6b-459f-4fc8-9dec-206254bb681e-config\") pod \"dnsmasq-dns-675f4bcbfc-2t6mt\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.730265 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.730283 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxs6x\" (UniqueName: \"kubernetes.io/projected/e9544a6b-459f-4fc8-9dec-206254bb681e-kube-api-access-wxs6x\") pod \"dnsmasq-dns-675f4bcbfc-2t6mt\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.730308 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrkz8\" (UniqueName: \"kubernetes.io/projected/8584f118-1d6d-47f9-bda5-cdc61f1985ee-kube-api-access-mrkz8\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.730328 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-config\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.743214 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mwwx8"] Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.832599 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-config\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.832728 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 
21:25:21.832753 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9544a6b-459f-4fc8-9dec-206254bb681e-config\") pod \"dnsmasq-dns-675f4bcbfc-2t6mt\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.832770 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxs6x\" (UniqueName: \"kubernetes.io/projected/e9544a6b-459f-4fc8-9dec-206254bb681e-kube-api-access-wxs6x\") pod \"dnsmasq-dns-675f4bcbfc-2t6mt\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.832794 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrkz8\" (UniqueName: \"kubernetes.io/projected/8584f118-1d6d-47f9-bda5-cdc61f1985ee-kube-api-access-mrkz8\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.833554 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-config\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.833555 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.833703 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9544a6b-459f-4fc8-9dec-206254bb681e-config\") pod \"dnsmasq-dns-675f4bcbfc-2t6mt\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.880935 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxs6x\" (UniqueName: \"kubernetes.io/projected/e9544a6b-459f-4fc8-9dec-206254bb681e-kube-api-access-wxs6x\") pod \"dnsmasq-dns-675f4bcbfc-2t6mt\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.893619 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrkz8\" (UniqueName: \"kubernetes.io/projected/8584f118-1d6d-47f9-bda5-cdc61f1985ee-kube-api-access-mrkz8\") pod \"dnsmasq-dns-78dd6ddcc-mwwx8\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:21 crc kubenswrapper[4689]: I1013 21:25:21.993525 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" (UID: "7ec9bc5b-ec92-4133-a24b-cf5c6760d39f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.021915 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.036465 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.170775 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.171242 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.179486 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.194361 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gdx6w"] Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.201941 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gdx6w"] Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.241213 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.444861 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mwwx8"] Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.564808 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" event={"ID":"8584f118-1d6d-47f9-bda5-cdc61f1985ee","Type":"ContainerStarted","Data":"d72abeb505aa3a0aac63b7847d069a50dc03c6091e4665c83056b7a90ff6529a"} Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.674253 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:22 crc kubenswrapper[4689]: I1013 21:25:22.677231 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2t6mt"] Oct 13 21:25:22 crc kubenswrapper[4689]: W1013 21:25:22.679706 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9544a6b_459f_4fc8_9dec_206254bb681e.slice/crio-fbdc350727007e0cb14faa5366e8306ccb552db4ff224fa80bf0ffbc481ba33a WatchSource:0}: Error finding container fbdc350727007e0cb14faa5366e8306ccb552db4ff224fa80bf0ffbc481ba33a: Status 404 returned error can't find the container with id fbdc350727007e0cb14faa5366e8306ccb552db4ff224fa80bf0ffbc481ba33a Oct 13 21:25:23 crc kubenswrapper[4689]: I1013 21:25:23.583552 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" event={"ID":"e9544a6b-459f-4fc8-9dec-206254bb681e","Type":"ContainerStarted","Data":"fbdc350727007e0cb14faa5366e8306ccb552db4ff224fa80bf0ffbc481ba33a"} Oct 13 21:25:23 crc kubenswrapper[4689]: I1013 21:25:23.864226 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:25:23 crc kubenswrapper[4689]: I1013 21:25:23.864445 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:25:23 crc kubenswrapper[4689]: I1013 21:25:23.864604 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:25:23 crc kubenswrapper[4689]: I1013 21:25:23.865221 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d6e09cc8455c50704247801d05cbebdbf7631acaa5e20cfd3bbcab24f523d8ed"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:25:23 crc kubenswrapper[4689]: I1013 21:25:23.865328 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://d6e09cc8455c50704247801d05cbebdbf7631acaa5e20cfd3bbcab24f523d8ed" gracePeriod=600 Oct 13 21:25:23 crc kubenswrapper[4689]: I1013 21:25:23.892209 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ec9bc5b-ec92-4133-a24b-cf5c6760d39f" path="/var/lib/kubelet/pods/7ec9bc5b-ec92-4133-a24b-cf5c6760d39f/volumes" Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.595944 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="d6e09cc8455c50704247801d05cbebdbf7631acaa5e20cfd3bbcab24f523d8ed" exitCode=0 Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.596024 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"d6e09cc8455c50704247801d05cbebdbf7631acaa5e20cfd3bbcab24f523d8ed"} Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.596326 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"ab2b61226a649c7705a70b5c1bf03941d31100bc06b01a9ba4b9500ce87dedb9"} Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.596351 4689 scope.go:117] "RemoveContainer" containerID="b8c850c25ce9ad448a6035ec03d9103c3aaadb9f60c108dca0caaf9e3d4833c4" Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.642002 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdjfj"] Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.642298 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bdjfj" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="registry-server" containerID="cri-o://97a5e834b82a4c10bded9c463df3796d0ab43bc3cafce6b42bb1cc835161e4d8" gracePeriod=2 Oct 13 21:25:24 crc kubenswrapper[4689]: E1013 21:25:24.774461 4689 cadvisor_stats_provider.go:516] "Partial failure 
issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod435c993a_77b5_4e4d_b481_a1c79eed8c58.slice/crio-97a5e834b82a4c10bded9c463df3796d0ab43bc3cafce6b42bb1cc835161e4d8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod435c993a_77b5_4e4d_b481_a1c79eed8c58.slice/crio-conmon-97a5e834b82a4c10bded9c463df3796d0ab43bc3cafce6b42bb1cc835161e4d8.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.935707 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2t6mt"] Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.981731 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b55mj"] Oct 13 21:25:24 crc kubenswrapper[4689]: I1013 21:25:24.983689 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.000074 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b55mj"] Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.114047 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-config\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.114661 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pd8d\" (UniqueName: \"kubernetes.io/projected/de387822-f6b5-43cc-80f4-2ba23cf2e43f-kube-api-access-2pd8d\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.114715 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.217416 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-config\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.217529 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pd8d\" (UniqueName: \"kubernetes.io/projected/de387822-f6b5-43cc-80f4-2ba23cf2e43f-kube-api-access-2pd8d\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.217553 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " 
pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.218543 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-config\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.219864 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.239154 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pd8d\" (UniqueName: \"kubernetes.io/projected/de387822-f6b5-43cc-80f4-2ba23cf2e43f-kube-api-access-2pd8d\") pod \"dnsmasq-dns-666b6646f7-b55mj\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") " pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.260734 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mwwx8"] Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.280606 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g24w7"] Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.283250 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.299906 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g24w7"] Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.317364 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.419210 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgz5x\" (UniqueName: \"kubernetes.io/projected/84a133e7-f634-4caf-988b-f7c3f0176e52-kube-api-access-sgz5x\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.419261 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.419298 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-config\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.521206 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.521277 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-config\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.521367 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgz5x\" (UniqueName: \"kubernetes.io/projected/84a133e7-f634-4caf-988b-f7c3f0176e52-kube-api-access-sgz5x\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.524420 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.524559 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-config\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.536920 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgz5x\" (UniqueName: \"kubernetes.io/projected/84a133e7-f634-4caf-988b-f7c3f0176e52-kube-api-access-sgz5x\") pod \"dnsmasq-dns-57d769cc4f-g24w7\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.611632 4689 generic.go:334] "Generic (PLEG): container finished" podID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerID="97a5e834b82a4c10bded9c463df3796d0ab43bc3cafce6b42bb1cc835161e4d8" exitCode=0 Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.611697 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdjfj" event={"ID":"435c993a-77b5-4e4d-b481-a1c79eed8c58","Type":"ContainerDied","Data":"97a5e834b82a4c10bded9c463df3796d0ab43bc3cafce6b42bb1cc835161e4d8"} Oct 13 21:25:25 crc kubenswrapper[4689]: I1013 21:25:25.613361 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.117899 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.119542 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.132458 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.132694 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.138705 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-l624j" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.138729 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.138829 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.138923 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.139445 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.146471 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230318 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4fa622b7-d774-4b55-a3e7-2053625177ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230376 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4fa622b7-d774-4b55-a3e7-2053625177ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230409 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: 
\"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230433 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8fsz\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-kube-api-access-c8fsz\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230454 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230734 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230840 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-config-data\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230886 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.230984 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.231138 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.231355 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333180 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 
21:25:26.333240 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4fa622b7-d774-4b55-a3e7-2053625177ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333287 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4fa622b7-d774-4b55-a3e7-2053625177ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333317 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333344 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8fsz\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-kube-api-access-c8fsz\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333367 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333400 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333422 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-config-data\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333438 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333469 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.333483 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.334569 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.334688 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.334729 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.335199 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.336018 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-config-data\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.338846 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.339382 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.340922 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4fa622b7-d774-4b55-a3e7-2053625177ca-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.351054 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4fa622b7-d774-4b55-a3e7-2053625177ca-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.352886 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8fsz\" (UniqueName: 
\"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-kube-api-access-c8fsz\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.356181 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.360376 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.387552 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.388765 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.394692 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.398788 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.399093 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.399294 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.399487 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.399852 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.400086 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-m8fn9" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.406280 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.448669 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536173 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b974f9f4-057e-4a9c-9835-a9636d5601f8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536280 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536329 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b974f9f4-057e-4a9c-9835-a9636d5601f8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536420 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536454 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r59sq\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-kube-api-access-r59sq\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536501 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536729 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536806 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536865 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.536988 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.537062 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.638498 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639050 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639099 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b974f9f4-057e-4a9c-9835-a9636d5601f8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639130 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639159 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b974f9f4-057e-4a9c-9835-a9636d5601f8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639216 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639271 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r59sq\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-kube-api-access-r59sq\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc 
kubenswrapper[4689]: I1013 21:25:26.639323 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639384 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639425 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639466 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.639691 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.641101 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.641470 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.642038 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.643150 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.644182 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.645341 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b974f9f4-057e-4a9c-9835-a9636d5601f8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.645751 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.645924 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.657975 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b974f9f4-057e-4a9c-9835-a9636d5601f8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.660532 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r59sq\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-kube-api-access-r59sq\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.667379 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:26 crc kubenswrapper[4689]: I1013 21:25:26.740950 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.801569 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.803390 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.815003 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.822866 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.827756 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.827863 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.829866 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-w5f6s" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.840605 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.843721 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966078 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966151 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966172 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966225 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966242 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-kolla-config\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966268 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-secrets\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 
21:25:27.966306 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/241dd2d8-e2a2-4653-bfc9-24255216fad4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966360 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-config-data-default\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:27 crc kubenswrapper[4689]: I1013 21:25:27.966378 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz8hm\" (UniqueName: \"kubernetes.io/projected/241dd2d8-e2a2-4653-bfc9-24255216fad4-kube-api-access-dz8hm\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.067834 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.067900 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.067921 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.067955 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.067975 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-kolla-config\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.068003 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-secrets\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.068018 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/241dd2d8-e2a2-4653-bfc9-24255216fad4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.068039 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-config-data-default\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.068053 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz8hm\" (UniqueName: \"kubernetes.io/projected/241dd2d8-e2a2-4653-bfc9-24255216fad4-kube-api-access-dz8hm\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.068245 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.068821 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/241dd2d8-e2a2-4653-bfc9-24255216fad4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.068872 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-kolla-config\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.069945 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.070082 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/241dd2d8-e2a2-4653-bfc9-24255216fad4-config-data-default\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.071502 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-secrets\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.072108 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc 
kubenswrapper[4689]: I1013 21:25:28.079753 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/241dd2d8-e2a2-4653-bfc9-24255216fad4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.087440 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz8hm\" (UniqueName: \"kubernetes.io/projected/241dd2d8-e2a2-4653-bfc9-24255216fad4-kube-api-access-dz8hm\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.093289 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"241dd2d8-e2a2-4653-bfc9-24255216fad4\") " pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.142423 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.640387 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.654092 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdjfj" event={"ID":"435c993a-77b5-4e4d-b481-a1c79eed8c58","Type":"ContainerDied","Data":"6479f7bb4032ca381bcf2954ad3b8813e6201eb4fb9fe8c74db25d16913ea8c3"} Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.654152 4689 scope.go:117] "RemoveContainer" containerID="97a5e834b82a4c10bded9c463df3796d0ab43bc3cafce6b42bb1cc835161e4d8" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.654320 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdjfj" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.778473 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-utilities\") pod \"435c993a-77b5-4e4d-b481-a1c79eed8c58\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.779472 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-utilities" (OuterVolumeSpecName: "utilities") pod "435c993a-77b5-4e4d-b481-a1c79eed8c58" (UID: "435c993a-77b5-4e4d-b481-a1c79eed8c58"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.782647 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvlvc\" (UniqueName: \"kubernetes.io/projected/435c993a-77b5-4e4d-b481-a1c79eed8c58-kube-api-access-xvlvc\") pod \"435c993a-77b5-4e4d-b481-a1c79eed8c58\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.782704 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-catalog-content\") pod \"435c993a-77b5-4e4d-b481-a1c79eed8c58\" (UID: \"435c993a-77b5-4e4d-b481-a1c79eed8c58\") " Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.783628 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.786677 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/435c993a-77b5-4e4d-b481-a1c79eed8c58-kube-api-access-xvlvc" (OuterVolumeSpecName: "kube-api-access-xvlvc") pod "435c993a-77b5-4e4d-b481-a1c79eed8c58" (UID: "435c993a-77b5-4e4d-b481-a1c79eed8c58"). InnerVolumeSpecName "kube-api-access-xvlvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.840882 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "435c993a-77b5-4e4d-b481-a1c79eed8c58" (UID: "435c993a-77b5-4e4d-b481-a1c79eed8c58"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.884882 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvlvc\" (UniqueName: \"kubernetes.io/projected/435c993a-77b5-4e4d-b481-a1c79eed8c58-kube-api-access-xvlvc\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.884913 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/435c993a-77b5-4e4d-b481-a1c79eed8c58-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:28 crc kubenswrapper[4689]: I1013 21:25:28.994796 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdjfj"] Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.001505 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bdjfj"] Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.156052 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 13 21:25:29 crc kubenswrapper[4689]: E1013 21:25:29.156604 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="extract-content" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.156626 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="extract-content" Oct 13 21:25:29 crc kubenswrapper[4689]: E1013 21:25:29.156669 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="registry-server" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.156677 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="registry-server" Oct 13 21:25:29 crc kubenswrapper[4689]: E1013 21:25:29.156706 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="extract-utilities" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.156713 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="extract-utilities" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.156960 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" containerName="registry-server" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.158496 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.162316 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-x5rtl" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.162934 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.163671 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.163680 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.163778 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.295852 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296090 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296211 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296306 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsvn6\" (UniqueName: \"kubernetes.io/projected/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-kube-api-access-vsvn6\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296414 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296497 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296636 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296748 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.296831 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.398426 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.398483 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.398515 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.398565 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.398932 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.398977 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.399312 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-kolla-config\") pod 
\"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.399762 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.399847 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.399907 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsvn6\" (UniqueName: \"kubernetes.io/projected/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-kube-api-access-vsvn6\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.399980 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.400008 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.400826 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.401019 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.404804 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.409035 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc 
kubenswrapper[4689]: I1013 21:25:29.412294 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.431202 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsvn6\" (UniqueName: \"kubernetes.io/projected/4fc44e1c-da65-48c1-ad48-8b41c9bf4391-kube-api-access-vsvn6\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.545565 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.546884 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.550080 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-jg45f" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.550289 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.550418 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.550711 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"4fc44e1c-da65-48c1-ad48-8b41c9bf4391\") " pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.564245 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.709123 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.709234 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.709261 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzxdf\" (UniqueName: \"kubernetes.io/projected/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-kube-api-access-jzxdf\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.709277 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-kolla-config\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " 
pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.709328 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-config-data\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.799966 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.810993 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.811042 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzxdf\" (UniqueName: \"kubernetes.io/projected/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-kube-api-access-jzxdf\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.811064 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-kolla-config\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.811093 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-config-data\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.811150 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.813707 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-kolla-config\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.814357 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-config-data\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.816620 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.829166 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.836996 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzxdf\" (UniqueName: \"kubernetes.io/projected/83675f56-8efb-4eb1-b6e5-65dde48c3ee4-kube-api-access-jzxdf\") pod \"memcached-0\" (UID: \"83675f56-8efb-4eb1-b6e5-65dde48c3ee4\") " pod="openstack/memcached-0" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.875426 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="435c993a-77b5-4e4d-b481-a1c79eed8c58" path="/var/lib/kubelet/pods/435c993a-77b5-4e4d-b481-a1c79eed8c58/volumes" Oct 13 21:25:29 crc kubenswrapper[4689]: I1013 21:25:29.902841 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.334072 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.335061 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.338787 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-8h89k" Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.348051 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.441756 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddcx2\" (UniqueName: \"kubernetes.io/projected/bbf31bb3-2ca0-4223-9b65-fb4081267ea9-kube-api-access-ddcx2\") pod \"kube-state-metrics-0\" (UID: \"bbf31bb3-2ca0-4223-9b65-fb4081267ea9\") " pod="openstack/kube-state-metrics-0" Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.543454 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddcx2\" (UniqueName: \"kubernetes.io/projected/bbf31bb3-2ca0-4223-9b65-fb4081267ea9-kube-api-access-ddcx2\") pod \"kube-state-metrics-0\" (UID: \"bbf31bb3-2ca0-4223-9b65-fb4081267ea9\") " pod="openstack/kube-state-metrics-0" Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.560117 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddcx2\" (UniqueName: \"kubernetes.io/projected/bbf31bb3-2ca0-4223-9b65-fb4081267ea9-kube-api-access-ddcx2\") pod \"kube-state-metrics-0\" (UID: \"bbf31bb3-2ca0-4223-9b65-fb4081267ea9\") " pod="openstack/kube-state-metrics-0" Oct 13 21:25:31 crc kubenswrapper[4689]: I1013 21:25:31.655299 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 21:25:34 crc kubenswrapper[4689]: I1013 21:25:34.974492 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8t9jt"] Oct 13 21:25:34 crc kubenswrapper[4689]: I1013 21:25:34.976309 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:34 crc kubenswrapper[4689]: I1013 21:25:34.980957 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 13 21:25:34 crc kubenswrapper[4689]: I1013 21:25:34.981257 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 13 21:25:34 crc kubenswrapper[4689]: I1013 21:25:34.981424 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-5vbcj" Oct 13 21:25:34 crc kubenswrapper[4689]: I1013 21:25:34.998009 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8t9jt"] Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.036008 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-6fdqj"] Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.039122 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.043488 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-6fdqj"] Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.105341 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-log-ovn\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.105410 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596fffc8-5b10-4da9-950c-ac58fafd2eb2-combined-ca-bundle\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.105436 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-run\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.105555 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/596fffc8-5b10-4da9-950c-ac58fafd2eb2-ovn-controller-tls-certs\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.105706 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55xqr\" (UniqueName: \"kubernetes.io/projected/596fffc8-5b10-4da9-950c-ac58fafd2eb2-kube-api-access-55xqr\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.105744 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/596fffc8-5b10-4da9-950c-ac58fafd2eb2-scripts\") pod \"ovn-controller-8t9jt\" (UID: 
\"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.105837 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-run-ovn\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207276 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-lib\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207326 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-run\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207349 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-etc-ovs\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207374 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/596fffc8-5b10-4da9-950c-ac58fafd2eb2-ovn-controller-tls-certs\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207396 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d43ea53c-112c-44ee-a9dd-d359de34d88b-scripts\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207435 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55xqr\" (UniqueName: \"kubernetes.io/projected/596fffc8-5b10-4da9-950c-ac58fafd2eb2-kube-api-access-55xqr\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207475 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/596fffc8-5b10-4da9-950c-ac58fafd2eb2-scripts\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207492 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gqk5\" (UniqueName: \"kubernetes.io/projected/d43ea53c-112c-44ee-a9dd-d359de34d88b-kube-api-access-2gqk5\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc 
kubenswrapper[4689]: I1013 21:25:35.207516 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-log\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207542 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-run\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207606 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-run-ovn\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207658 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-log-ovn\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.207695 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596fffc8-5b10-4da9-950c-ac58fafd2eb2-combined-ca-bundle\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.208411 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-run\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.208518 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-run-ovn\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.211511 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/596fffc8-5b10-4da9-950c-ac58fafd2eb2-scripts\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.211690 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/596fffc8-5b10-4da9-950c-ac58fafd2eb2-var-log-ovn\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.215669 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596fffc8-5b10-4da9-950c-ac58fafd2eb2-combined-ca-bundle\") pod 
\"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.217450 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/596fffc8-5b10-4da9-950c-ac58fafd2eb2-ovn-controller-tls-certs\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.224992 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55xqr\" (UniqueName: \"kubernetes.io/projected/596fffc8-5b10-4da9-950c-ac58fafd2eb2-kube-api-access-55xqr\") pod \"ovn-controller-8t9jt\" (UID: \"596fffc8-5b10-4da9-950c-ac58fafd2eb2\") " pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.302340 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.309448 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gqk5\" (UniqueName: \"kubernetes.io/projected/d43ea53c-112c-44ee-a9dd-d359de34d88b-kube-api-access-2gqk5\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.309498 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-log\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.309526 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-run\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.309624 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-lib\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.309657 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-etc-ovs\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.309682 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d43ea53c-112c-44ee-a9dd-d359de34d88b-scripts\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.309868 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-log\") pod \"ovn-controller-ovs-6fdqj\" (UID: 
\"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.310043 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-lib\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.310097 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-var-run\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.310203 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d43ea53c-112c-44ee-a9dd-d359de34d88b-etc-ovs\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.312172 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d43ea53c-112c-44ee-a9dd-d359de34d88b-scripts\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.331089 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gqk5\" (UniqueName: \"kubernetes.io/projected/d43ea53c-112c-44ee-a9dd-d359de34d88b-kube-api-access-2gqk5\") pod \"ovn-controller-ovs-6fdqj\" (UID: \"d43ea53c-112c-44ee-a9dd-d359de34d88b\") " pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.372568 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.465255 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.467215 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.472159 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-44z58" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.472429 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.472465 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.472546 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.474846 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.481456 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614522 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614600 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-config\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614650 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614689 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtcjt\" (UniqueName: \"kubernetes.io/projected/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-kube-api-access-dtcjt\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614709 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614739 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614757 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.614777 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716181 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716256 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtcjt\" (UniqueName: \"kubernetes.io/projected/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-kube-api-access-dtcjt\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716281 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716316 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716335 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716356 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716396 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.716413 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-config\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: 
I1013 21:25:35.717123 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.717230 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.717553 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.717688 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-config\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.720488 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.722419 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.729074 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.732506 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtcjt\" (UniqueName: \"kubernetes.io/projected/dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8-kube-api-access-dtcjt\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.736036 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8\") " pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:35 crc kubenswrapper[4689]: I1013 21:25:35.794753 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:36 crc kubenswrapper[4689]: I1013 21:25:36.072241 4689 scope.go:117] "RemoveContainer" containerID="5cf56d9271bee60f74cb6560c1713464676fbd0ab3924d9b78f48cd88bb92ed3" Oct 13 21:25:36 crc kubenswrapper[4689]: E1013 21:25:36.943795 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 13 21:25:36 crc kubenswrapper[4689]: E1013 21:25:36.944009 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mrkz8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-mwwx8_openstack(8584f118-1d6d-47f9-bda5-cdc61f1985ee): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 13 21:25:36 crc kubenswrapper[4689]: E1013 21:25:36.949124 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" podUID="8584f118-1d6d-47f9-bda5-cdc61f1985ee" Oct 13 21:25:36 crc kubenswrapper[4689]: E1013 21:25:36.998488 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 
13 21:25:36 crc kubenswrapper[4689]: E1013 21:25:36.998698 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wxs6x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-2t6mt_openstack(e9544a6b-459f-4fc8-9dec-206254bb681e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 13 21:25:37 crc kubenswrapper[4689]: E1013 21:25:37.001399 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" podUID="e9544a6b-459f-4fc8-9dec-206254bb681e" Oct 13 21:25:37 crc kubenswrapper[4689]: I1013 21:25:37.055324 4689 scope.go:117] "RemoveContainer" containerID="c0892ea75e1537e4dc7fd71803e0da852eb8ff55fab1e9d501957b047d5a6bfd" Oct 13 21:25:37 crc kubenswrapper[4689]: I1013 21:25:37.473275 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g24w7"] Oct 13 21:25:37 crc kubenswrapper[4689]: I1013 21:25:37.728122 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" event={"ID":"84a133e7-f634-4caf-988b-f7c3f0176e52","Type":"ContainerStarted","Data":"07e5c45a340e6afbf3a2e8c653da1c706fdf985ed91102b9b563705f082cb795"} Oct 13 21:25:37 crc kubenswrapper[4689]: I1013 21:25:37.767025 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:25:37 crc kubenswrapper[4689]: I1013 21:25:37.801921 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/openstack-galera-0"] Oct 13 21:25:37 crc kubenswrapper[4689]: W1013 21:25:37.809242 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb974f9f4_057e_4a9c_9835_a9636d5601f8.slice/crio-28a77c2c0807a56dc6a58581f42cf5b777fc9f31422f5f4ad91ce923c8f0d871 WatchSource:0}: Error finding container 28a77c2c0807a56dc6a58581f42cf5b777fc9f31422f5f4ad91ce923c8f0d871: Status 404 returned error can't find the container with id 28a77c2c0807a56dc6a58581f42cf5b777fc9f31422f5f4ad91ce923c8f0d871 Oct 13 21:25:37 crc kubenswrapper[4689]: I1013 21:25:37.829805 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 21:25:37 crc kubenswrapper[4689]: W1013 21:25:37.850105 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod241dd2d8_e2a2_4653_bfc9_24255216fad4.slice/crio-7d5ae899d3f29ebeee5f3f6566bf0579edd95061e706c6ba2c12c4effd14e3b0 WatchSource:0}: Error finding container 7d5ae899d3f29ebeee5f3f6566bf0579edd95061e706c6ba2c12c4effd14e3b0: Status 404 returned error can't find the container with id 7d5ae899d3f29ebeee5f3f6566bf0579edd95061e706c6ba2c12c4effd14e3b0 Oct 13 21:25:37 crc kubenswrapper[4689]: I1013 21:25:37.922625 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-6fdqj"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.185415 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b55mj"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.206720 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.232661 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.244147 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 21:25:38 crc kubenswrapper[4689]: W1013 21:25:38.268745 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fc44e1c_da65_48c1_ad48_8b41c9bf4391.slice/crio-40e34a1c5d7e312d0fc93838b45ffb36df6557ad65a21fe7ac37ad50dc5f9364 WatchSource:0}: Error finding container 40e34a1c5d7e312d0fc93838b45ffb36df6557ad65a21fe7ac37ad50dc5f9364: Status 404 returned error can't find the container with id 40e34a1c5d7e312d0fc93838b45ffb36df6557ad65a21fe7ac37ad50dc5f9364 Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.280155 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8t9jt"] Oct 13 21:25:38 crc kubenswrapper[4689]: W1013 21:25:38.287413 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod596fffc8_5b10_4da9_950c_ac58fafd2eb2.slice/crio-afd70aa5a4dd4b9f59ff8b747a858a8a3bd1bf6412dc551c780705503b646da7 WatchSource:0}: Error finding container afd70aa5a4dd4b9f59ff8b747a858a8a3bd1bf6412dc551c780705503b646da7: Status 404 returned error can't find the container with id afd70aa5a4dd4b9f59ff8b747a858a8a3bd1bf6412dc551c780705503b646da7 Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.318142 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.326488 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.373126 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.443153 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxs6x\" (UniqueName: \"kubernetes.io/projected/e9544a6b-459f-4fc8-9dec-206254bb681e-kube-api-access-wxs6x\") pod \"e9544a6b-459f-4fc8-9dec-206254bb681e\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.443275 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-dns-svc\") pod \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.443383 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrkz8\" (UniqueName: \"kubernetes.io/projected/8584f118-1d6d-47f9-bda5-cdc61f1985ee-kube-api-access-mrkz8\") pod \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.443934 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-config\") pod \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\" (UID: \"8584f118-1d6d-47f9-bda5-cdc61f1985ee\") " Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.444024 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9544a6b-459f-4fc8-9dec-206254bb681e-config\") pod \"e9544a6b-459f-4fc8-9dec-206254bb681e\" (UID: \"e9544a6b-459f-4fc8-9dec-206254bb681e\") " Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.445842 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9544a6b-459f-4fc8-9dec-206254bb681e-config" (OuterVolumeSpecName: "config") pod "e9544a6b-459f-4fc8-9dec-206254bb681e" (UID: "e9544a6b-459f-4fc8-9dec-206254bb681e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.446512 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-config" (OuterVolumeSpecName: "config") pod "8584f118-1d6d-47f9-bda5-cdc61f1985ee" (UID: "8584f118-1d6d-47f9-bda5-cdc61f1985ee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.446638 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8584f118-1d6d-47f9-bda5-cdc61f1985ee" (UID: "8584f118-1d6d-47f9-bda5-cdc61f1985ee"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.452325 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.454172 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.460433 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9544a6b-459f-4fc8-9dec-206254bb681e-kube-api-access-wxs6x" (OuterVolumeSpecName: "kube-api-access-wxs6x") pod "e9544a6b-459f-4fc8-9dec-206254bb681e" (UID: "e9544a6b-459f-4fc8-9dec-206254bb681e"). InnerVolumeSpecName "kube-api-access-wxs6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.461234 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.461520 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.461695 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.461880 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-m5jsw" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.467635 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.474837 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8584f118-1d6d-47f9-bda5-cdc61f1985ee-kube-api-access-mrkz8" (OuterVolumeSpecName: "kube-api-access-mrkz8") pod "8584f118-1d6d-47f9-bda5-cdc61f1985ee" (UID: "8584f118-1d6d-47f9-bda5-cdc61f1985ee"). InnerVolumeSpecName "kube-api-access-mrkz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.546571 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d13e0078-efa1-47aa-86f7-c7e19e2283af-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.546655 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.546688 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.546731 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.546771 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d13e0078-efa1-47aa-86f7-c7e19e2283af-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.546795 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.547140 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb9sv\" (UniqueName: \"kubernetes.io/projected/d13e0078-efa1-47aa-86f7-c7e19e2283af-kube-api-access-kb9sv\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.547292 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d13e0078-efa1-47aa-86f7-c7e19e2283af-config\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.547580 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.547627 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8584f118-1d6d-47f9-bda5-cdc61f1985ee-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.547644 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9544a6b-459f-4fc8-9dec-206254bb681e-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.547663 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxs6x\" (UniqueName: \"kubernetes.io/projected/e9544a6b-459f-4fc8-9dec-206254bb681e-kube-api-access-wxs6x\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.547681 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrkz8\" (UniqueName: \"kubernetes.io/projected/8584f118-1d6d-47f9-bda5-cdc61f1985ee-kube-api-access-mrkz8\") on node \"crc\" DevicePath \"\"" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.654864 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655063 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d13e0078-efa1-47aa-86f7-c7e19e2283af-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655134 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655305 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb9sv\" (UniqueName: \"kubernetes.io/projected/d13e0078-efa1-47aa-86f7-c7e19e2283af-kube-api-access-kb9sv\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655379 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d13e0078-efa1-47aa-86f7-c7e19e2283af-config\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655467 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d13e0078-efa1-47aa-86f7-c7e19e2283af-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655569 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655655 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.655841 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.656833 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d13e0078-efa1-47aa-86f7-c7e19e2283af-config\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.657094 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d13e0078-efa1-47aa-86f7-c7e19e2283af-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.657277 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d13e0078-efa1-47aa-86f7-c7e19e2283af-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.660653 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.661520 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.663277 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0078-efa1-47aa-86f7-c7e19e2283af-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.673582 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb9sv\" (UniqueName: \"kubernetes.io/projected/d13e0078-efa1-47aa-86f7-c7e19e2283af-kube-api-access-kb9sv\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.686777 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"d13e0078-efa1-47aa-86f7-c7e19e2283af\") " pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc 
kubenswrapper[4689]: I1013 21:25:38.739930 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4fa622b7-d774-4b55-a3e7-2053625177ca","Type":"ContainerStarted","Data":"380b9294d9531efa033fee30a5d847a46b3036637a1fd104f40125feabf20b72"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.741070 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" event={"ID":"8584f118-1d6d-47f9-bda5-cdc61f1985ee","Type":"ContainerDied","Data":"d72abeb505aa3a0aac63b7847d069a50dc03c6091e4665c83056b7a90ff6529a"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.741147 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-mwwx8" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.742795 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4fc44e1c-da65-48c1-ad48-8b41c9bf4391","Type":"ContainerStarted","Data":"40e34a1c5d7e312d0fc93838b45ffb36df6557ad65a21fe7ac37ad50dc5f9364"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.745130 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bbf31bb3-2ca0-4223-9b65-fb4081267ea9","Type":"ContainerStarted","Data":"c8f3f8ad8811c84d9716c350e364cc43686325556da97917bee893fb75ccf1d9"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.746736 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.746800 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-2t6mt" event={"ID":"e9544a6b-459f-4fc8-9dec-206254bb681e","Type":"ContainerDied","Data":"fbdc350727007e0cb14faa5366e8306ccb552db4ff224fa80bf0ffbc481ba33a"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.748494 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" event={"ID":"de387822-f6b5-43cc-80f4-2ba23cf2e43f","Type":"ContainerStarted","Data":"d57d1b84a6251caa0d13151e5a64b1d916a6b19cebb0744812885dc5d5e43e78"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.750694 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8","Type":"ContainerStarted","Data":"2481cd36df31d3ed8cee3e098c9c3e9cd468f6c3ed20fa702487bc3b41ddee54"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.752501 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"241dd2d8-e2a2-4653-bfc9-24255216fad4","Type":"ContainerStarted","Data":"7d5ae899d3f29ebeee5f3f6566bf0579edd95061e706c6ba2c12c4effd14e3b0"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.754170 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt" event={"ID":"596fffc8-5b10-4da9-950c-ac58fafd2eb2","Type":"ContainerStarted","Data":"afd70aa5a4dd4b9f59ff8b747a858a8a3bd1bf6412dc551c780705503b646da7"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.755546 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6fdqj" event={"ID":"d43ea53c-112c-44ee-a9dd-d359de34d88b","Type":"ContainerStarted","Data":"e9c4042cdb0a685c2f82f58cedb0f6762a98c85b5bb1b234f913d9593b397546"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.757364 4689 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/memcached-0" event={"ID":"83675f56-8efb-4eb1-b6e5-65dde48c3ee4","Type":"ContainerStarted","Data":"6472cad3a19030a63848b86182971c6003551165d2eebd339916596b2734fb06"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.762307 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b974f9f4-057e-4a9c-9835-a9636d5601f8","Type":"ContainerStarted","Data":"28a77c2c0807a56dc6a58581f42cf5b777fc9f31422f5f4ad91ce923c8f0d871"} Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.794438 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.856573 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2t6mt"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.885961 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2t6mt"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.908383 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mwwx8"] Oct 13 21:25:38 crc kubenswrapper[4689]: I1013 21:25:38.917328 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-mwwx8"] Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.402774 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 13 21:25:39 crc kubenswrapper[4689]: W1013 21:25:39.415927 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd13e0078_efa1_47aa_86f7_c7e19e2283af.slice/crio-c0397e398ed3fbba4fbbbeae4298765a32203bdab062296a9a4f46e74e1e280a WatchSource:0}: Error finding container c0397e398ed3fbba4fbbbeae4298765a32203bdab062296a9a4f46e74e1e280a: Status 404 returned error can't find the container with id c0397e398ed3fbba4fbbbeae4298765a32203bdab062296a9a4f46e74e1e280a Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.774350 4689 generic.go:334] "Generic (PLEG): container finished" podID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerID="bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61" exitCode=0 Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.774434 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" event={"ID":"de387822-f6b5-43cc-80f4-2ba23cf2e43f","Type":"ContainerDied","Data":"bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61"} Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.777720 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d13e0078-efa1-47aa-86f7-c7e19e2283af","Type":"ContainerStarted","Data":"c0397e398ed3fbba4fbbbeae4298765a32203bdab062296a9a4f46e74e1e280a"} Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.780220 4689 generic.go:334] "Generic (PLEG): container finished" podID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerID="bf51f2e2df262018853dc04b1dd67f48cee05ab3f3184ee75b3296eef3c5238c" exitCode=0 Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.780260 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" event={"ID":"84a133e7-f634-4caf-988b-f7c3f0176e52","Type":"ContainerDied","Data":"bf51f2e2df262018853dc04b1dd67f48cee05ab3f3184ee75b3296eef3c5238c"} Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.877336 4689 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="8584f118-1d6d-47f9-bda5-cdc61f1985ee" path="/var/lib/kubelet/pods/8584f118-1d6d-47f9-bda5-cdc61f1985ee/volumes" Oct 13 21:25:39 crc kubenswrapper[4689]: I1013 21:25:39.877761 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9544a6b-459f-4fc8-9dec-206254bb681e" path="/var/lib/kubelet/pods/e9544a6b-459f-4fc8-9dec-206254bb681e/volumes" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.849888 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8","Type":"ContainerStarted","Data":"6b5ccbe925e6bb050b050150d5cf2e466a4f0b615a30f622f90f55d3915f0ee6"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.851743 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt" event={"ID":"596fffc8-5b10-4da9-950c-ac58fafd2eb2","Type":"ContainerStarted","Data":"683a3e2b36b64e85bea25750f20a7686c6552cf2d6fae40b8365145b7914eaa2"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.851867 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-8t9jt" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.853418 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"83675f56-8efb-4eb1-b6e5-65dde48c3ee4","Type":"ContainerStarted","Data":"4b25e64aedc16e4ee468d12b4cd73f97d050770bc7c64b51a1692ac39e532e9c"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.853613 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.857942 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" event={"ID":"de387822-f6b5-43cc-80f4-2ba23cf2e43f","Type":"ContainerStarted","Data":"bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.858053 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.860075 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d13e0078-efa1-47aa-86f7-c7e19e2283af","Type":"ContainerStarted","Data":"bf17658162f0781a0318ae087080e31fec9bf769dcdcd059bea767111b44539e"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.862900 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" event={"ID":"84a133e7-f634-4caf-988b-f7c3f0176e52","Type":"ContainerStarted","Data":"e2991706dab0f218c7f4d16a982e010731cbe2b3e5c200b368e6e08e82bf7d4d"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.863001 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.864744 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4fc44e1c-da65-48c1-ad48-8b41c9bf4391","Type":"ContainerStarted","Data":"af086afce5581a6b97f5d8e72d54d1dd6acb4fe1a31778e05b560f7766b4a9ac"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.872798 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8t9jt" podStartSLOduration=5.78961526 podStartE2EDuration="13.872778865s" podCreationTimestamp="2025-10-13 21:25:34 +0000 UTC" 
firstStartedPulling="2025-10-13 21:25:38.291127402 +0000 UTC m=+855.209372487" lastFinishedPulling="2025-10-13 21:25:46.374291007 +0000 UTC m=+863.292536092" observedRunningTime="2025-10-13 21:25:47.871992866 +0000 UTC m=+864.790237951" watchObservedRunningTime="2025-10-13 21:25:47.872778865 +0000 UTC m=+864.791023950" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.883783 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"241dd2d8-e2a2-4653-bfc9-24255216fad4","Type":"ContainerStarted","Data":"43639d7536131353fbf0414eb00079418d70a1870ebb71b88593e0eed592035c"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.883867 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.883885 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bbf31bb3-2ca0-4223-9b65-fb4081267ea9","Type":"ContainerStarted","Data":"902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.883898 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6fdqj" event={"ID":"d43ea53c-112c-44ee-a9dd-d359de34d88b","Type":"ContainerStarted","Data":"73687e29308d553159655767baf087d8df36d08173602e4b81689c81190fc5ca"} Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.896914 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=8.056228159 podStartE2EDuration="16.896891173s" podCreationTimestamp="2025-10-13 21:25:31 +0000 UTC" firstStartedPulling="2025-10-13 21:25:38.268821416 +0000 UTC m=+855.187066501" lastFinishedPulling="2025-10-13 21:25:47.10948443 +0000 UTC m=+864.027729515" observedRunningTime="2025-10-13 21:25:47.888183968 +0000 UTC m=+864.806429053" watchObservedRunningTime="2025-10-13 21:25:47.896891173 +0000 UTC m=+864.815136278" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.911485 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" podStartSLOduration=22.635021684 podStartE2EDuration="23.911463467s" podCreationTimestamp="2025-10-13 21:25:24 +0000 UTC" firstStartedPulling="2025-10-13 21:25:38.212783805 +0000 UTC m=+855.131028890" lastFinishedPulling="2025-10-13 21:25:39.489225588 +0000 UTC m=+856.407470673" observedRunningTime="2025-10-13 21:25:47.907993045 +0000 UTC m=+864.826238130" watchObservedRunningTime="2025-10-13 21:25:47.911463467 +0000 UTC m=+864.829708562" Oct 13 21:25:47 crc kubenswrapper[4689]: I1013 21:25:47.933940 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" podStartSLOduration=21.092270609 podStartE2EDuration="22.933921367s" podCreationTimestamp="2025-10-13 21:25:25 +0000 UTC" firstStartedPulling="2025-10-13 21:25:37.486559814 +0000 UTC m=+854.404804899" lastFinishedPulling="2025-10-13 21:25:39.328210552 +0000 UTC m=+856.246455657" observedRunningTime="2025-10-13 21:25:47.931738855 +0000 UTC m=+864.849983960" watchObservedRunningTime="2025-10-13 21:25:47.933921367 +0000 UTC m=+864.852166452" Oct 13 21:25:48 crc kubenswrapper[4689]: I1013 21:25:48.021753 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=11.011562592 podStartE2EDuration="19.021726536s" podCreationTimestamp="2025-10-13 21:25:29 
+0000 UTC" firstStartedPulling="2025-10-13 21:25:38.233853652 +0000 UTC m=+855.152098737" lastFinishedPulling="2025-10-13 21:25:46.244017596 +0000 UTC m=+863.162262681" observedRunningTime="2025-10-13 21:25:48.01806888 +0000 UTC m=+864.936313965" watchObservedRunningTime="2025-10-13 21:25:48.021726536 +0000 UTC m=+864.939971621" Oct 13 21:25:48 crc kubenswrapper[4689]: I1013 21:25:48.880951 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b974f9f4-057e-4a9c-9835-a9636d5601f8","Type":"ContainerStarted","Data":"00004912b447434369b82694f633ce610c7441a9cb15a89d73edb1d7eb308492"} Oct 13 21:25:48 crc kubenswrapper[4689]: I1013 21:25:48.884230 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4fa622b7-d774-4b55-a3e7-2053625177ca","Type":"ContainerStarted","Data":"6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762"} Oct 13 21:25:48 crc kubenswrapper[4689]: I1013 21:25:48.886081 4689 generic.go:334] "Generic (PLEG): container finished" podID="d43ea53c-112c-44ee-a9dd-d359de34d88b" containerID="73687e29308d553159655767baf087d8df36d08173602e4b81689c81190fc5ca" exitCode=0 Oct 13 21:25:48 crc kubenswrapper[4689]: I1013 21:25:48.887501 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6fdqj" event={"ID":"d43ea53c-112c-44ee-a9dd-d359de34d88b","Type":"ContainerDied","Data":"73687e29308d553159655767baf087d8df36d08173602e4b81689c81190fc5ca"} Oct 13 21:25:50 crc kubenswrapper[4689]: I1013 21:25:50.909726 4689 generic.go:334] "Generic (PLEG): container finished" podID="4fc44e1c-da65-48c1-ad48-8b41c9bf4391" containerID="af086afce5581a6b97f5d8e72d54d1dd6acb4fe1a31778e05b560f7766b4a9ac" exitCode=0 Oct 13 21:25:50 crc kubenswrapper[4689]: I1013 21:25:50.909823 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4fc44e1c-da65-48c1-ad48-8b41c9bf4391","Type":"ContainerDied","Data":"af086afce5581a6b97f5d8e72d54d1dd6acb4fe1a31778e05b560f7766b4a9ac"} Oct 13 21:25:50 crc kubenswrapper[4689]: I1013 21:25:50.923733 4689 generic.go:334] "Generic (PLEG): container finished" podID="241dd2d8-e2a2-4653-bfc9-24255216fad4" containerID="43639d7536131353fbf0414eb00079418d70a1870ebb71b88593e0eed592035c" exitCode=0 Oct 13 21:25:50 crc kubenswrapper[4689]: I1013 21:25:50.923792 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"241dd2d8-e2a2-4653-bfc9-24255216fad4","Type":"ContainerDied","Data":"43639d7536131353fbf0414eb00079418d70a1870ebb71b88593e0eed592035c"} Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.941201 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6fdqj" event={"ID":"d43ea53c-112c-44ee-a9dd-d359de34d88b","Type":"ContainerStarted","Data":"a6cfcb201fa55a5ab82e42fc727c3031b39beb732c4f0e44e6b71645ae23157c"} Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.941767 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-6fdqj" event={"ID":"d43ea53c-112c-44ee-a9dd-d359de34d88b","Type":"ContainerStarted","Data":"29f8b0a66a599f81dc361137fb486a1e344d48594ccfefbec297b93f82547414"} Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.942322 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.942366 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/ovn-controller-ovs-6fdqj" Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.944373 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"d13e0078-efa1-47aa-86f7-c7e19e2283af","Type":"ContainerStarted","Data":"241dd3021f662e9b5d1f2324f4ba75efc4a59e43198bcc575ee6ec5e49f3d936"} Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.949906 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8","Type":"ContainerStarted","Data":"2f3ca697d65dcf93577d879f41b27526e23a9c12d41b9dbf511c3759bbde79cc"} Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.951794 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4fc44e1c-da65-48c1-ad48-8b41c9bf4391","Type":"ContainerStarted","Data":"03a4fd2f7a8fae380ded2cf9fb119867511e0a6c8d6054626f9ee132cb96db88"} Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.954229 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"241dd2d8-e2a2-4653-bfc9-24255216fad4","Type":"ContainerStarted","Data":"219be0618b0be1ccc6c24338e4fbb85f1ceed2ad1a8f277a437872dad16d7a59"} Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.966915 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-6fdqj" podStartSLOduration=8.618283942 podStartE2EDuration="16.966896316s" podCreationTimestamp="2025-10-13 21:25:35 +0000 UTC" firstStartedPulling="2025-10-13 21:25:37.933143752 +0000 UTC m=+854.851388837" lastFinishedPulling="2025-10-13 21:25:46.281756126 +0000 UTC m=+863.200001211" observedRunningTime="2025-10-13 21:25:51.963781732 +0000 UTC m=+868.882026827" watchObservedRunningTime="2025-10-13 21:25:51.966896316 +0000 UTC m=+868.885141401" Oct 13 21:25:51 crc kubenswrapper[4689]: I1013 21:25:51.989358 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=15.684586947 podStartE2EDuration="23.989339125s" podCreationTimestamp="2025-10-13 21:25:28 +0000 UTC" firstStartedPulling="2025-10-13 21:25:38.271327116 +0000 UTC m=+855.189572201" lastFinishedPulling="2025-10-13 21:25:46.576079254 +0000 UTC m=+863.494324379" observedRunningTime="2025-10-13 21:25:51.983077618 +0000 UTC m=+868.901322723" watchObservedRunningTime="2025-10-13 21:25:51.989339125 +0000 UTC m=+868.907584210" Oct 13 21:25:52 crc kubenswrapper[4689]: I1013 21:25:52.020648 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=5.802244857 podStartE2EDuration="18.020627953s" podCreationTimestamp="2025-10-13 21:25:34 +0000 UTC" firstStartedPulling="2025-10-13 21:25:38.39114783 +0000 UTC m=+855.309392915" lastFinishedPulling="2025-10-13 21:25:50.609530926 +0000 UTC m=+867.527776011" observedRunningTime="2025-10-13 21:25:52.019154007 +0000 UTC m=+868.937399102" watchObservedRunningTime="2025-10-13 21:25:52.020627953 +0000 UTC m=+868.938873038" Oct 13 21:25:52 crc kubenswrapper[4689]: I1013 21:25:52.021750 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=17.364149992 podStartE2EDuration="26.021744589s" podCreationTimestamp="2025-10-13 21:25:26 +0000 UTC" firstStartedPulling="2025-10-13 21:25:37.851820845 +0000 UTC m=+854.770065920" lastFinishedPulling="2025-10-13 21:25:46.509415412 +0000 UTC 
m=+863.427660517" observedRunningTime="2025-10-13 21:25:52.001044851 +0000 UTC m=+868.919289936" watchObservedRunningTime="2025-10-13 21:25:52.021744589 +0000 UTC m=+868.939989674" Oct 13 21:25:52 crc kubenswrapper[4689]: I1013 21:25:52.039243 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.837383042 podStartE2EDuration="15.039103958s" podCreationTimestamp="2025-10-13 21:25:37 +0000 UTC" firstStartedPulling="2025-10-13 21:25:39.419703679 +0000 UTC m=+856.337948764" lastFinishedPulling="2025-10-13 21:25:50.621424545 +0000 UTC m=+867.539669680" observedRunningTime="2025-10-13 21:25:52.034365236 +0000 UTC m=+868.952610331" watchObservedRunningTime="2025-10-13 21:25:52.039103958 +0000 UTC m=+868.957349093" Oct 13 21:25:53 crc kubenswrapper[4689]: I1013 21:25:53.794940 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:53 crc kubenswrapper[4689]: I1013 21:25:53.796198 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:53 crc kubenswrapper[4689]: I1013 21:25:53.796222 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:53 crc kubenswrapper[4689]: I1013 21:25:53.841542 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:53 crc kubenswrapper[4689]: I1013 21:25:53.842506 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:53 crc kubenswrapper[4689]: I1013 21:25:53.966972 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.041635 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.056240 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.327869 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b55mj"] Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.328287 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" podUID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerName="dnsmasq-dns" containerID="cri-o://bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120" gracePeriod=10 Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.342152 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.352911 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-k9l2j"] Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.354184 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.363151 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.371380 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-k9l2j"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.445891 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-config\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.445935 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.445989 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtb2s\" (UniqueName: \"kubernetes.io/projected/eb3f670f-4095-43d5-8376-b903cf972fc6-kube-api-access-dtb2s\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.446011 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.472586 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-x69m8"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.473683 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.475751 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.481812 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-x69m8"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.547558 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtb2s\" (UniqueName: \"kubernetes.io/projected/eb3f670f-4095-43d5-8376-b903cf972fc6-kube-api-access-dtb2s\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.547844 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.547867 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/56a091d6-e531-4956-b5aa-15f43a9c1038-ovs-rundir\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.547903 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/56a091d6-e531-4956-b5aa-15f43a9c1038-ovn-rundir\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.547926 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/56a091d6-e531-4956-b5aa-15f43a9c1038-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.547971 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56a091d6-e531-4956-b5aa-15f43a9c1038-config\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.547998 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a091d6-e531-4956-b5aa-15f43a9c1038-combined-ca-bundle\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.548023 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kzn7\" (UniqueName: \"kubernetes.io/projected/56a091d6-e531-4956-b5aa-15f43a9c1038-kube-api-access-8kzn7\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.548069 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-config\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.548085 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.549135 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.549901 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.550368 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-config\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.579727 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtb2s\" (UniqueName: \"kubernetes.io/projected/eb3f670f-4095-43d5-8376-b903cf972fc6-kube-api-access-dtb2s\") pod \"dnsmasq-dns-7f896c8c65-k9l2j\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.607668 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g24w7"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.608037 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" podUID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerName="dnsmasq-dns" containerID="cri-o://e2991706dab0f218c7f4d16a982e010731cbe2b3e5c200b368e6e08e82bf7d4d" gracePeriod=10
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.613565 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.637839 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-d4cq8"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.641948 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.647636 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.650125 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/56a091d6-e531-4956-b5aa-15f43a9c1038-ovs-rundir\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.650175 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/56a091d6-e531-4956-b5aa-15f43a9c1038-ovn-rundir\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.650210 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/56a091d6-e531-4956-b5aa-15f43a9c1038-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.650255 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56a091d6-e531-4956-b5aa-15f43a9c1038-config\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.650280 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a091d6-e531-4956-b5aa-15f43a9c1038-combined-ca-bundle\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.650305 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kzn7\" (UniqueName: \"kubernetes.io/projected/56a091d6-e531-4956-b5aa-15f43a9c1038-kube-api-access-8kzn7\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.651003 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/56a091d6-e531-4956-b5aa-15f43a9c1038-ovs-rundir\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.651926 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/56a091d6-e531-4956-b5aa-15f43a9c1038-ovn-rundir\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.652018 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56a091d6-e531-4956-b5aa-15f43a9c1038-config\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.655461 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/56a091d6-e531-4956-b5aa-15f43a9c1038-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.673612 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a091d6-e531-4956-b5aa-15f43a9c1038-combined-ca-bundle\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.686247 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kzn7\" (UniqueName: \"kubernetes.io/projected/56a091d6-e531-4956-b5aa-15f43a9c1038-kube-api-access-8kzn7\") pod \"ovn-controller-metrics-x69m8\" (UID: \"56a091d6-e531-4956-b5aa-15f43a9c1038\") " pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.691756 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-d4cq8"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.716235 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.754717 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.754824 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.754866 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-config\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.754910 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnm4h\" (UniqueName: \"kubernetes.io/projected/72978894-08d4-40e7-ab23-9d7325ced36d-kube-api-access-lnm4h\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.754983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.759542 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.761163 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.765122 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.765318 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-45fg4"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.765447 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.766748 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.771979 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.793722 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-x69m8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.856756 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.856902 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.856968 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857010 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857036 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b50077e-96c6-4908-b3bd-5efa65b83fff-config\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857079 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857128 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857157 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw7kn\" (UniqueName: \"kubernetes.io/projected/3b50077e-96c6-4908-b3bd-5efa65b83fff-kube-api-access-tw7kn\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857191 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-config\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857219 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b50077e-96c6-4908-b3bd-5efa65b83fff-scripts\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857257 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3b50077e-96c6-4908-b3bd-5efa65b83fff-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.857289 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnm4h\" (UniqueName: \"kubernetes.io/projected/72978894-08d4-40e7-ab23-9d7325ced36d-kube-api-access-lnm4h\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.858952 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.858956 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.859048 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-config\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.859562 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.877492 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnm4h\" (UniqueName: \"kubernetes.io/projected/72978894-08d4-40e7-ab23-9d7325ced36d-kube-api-access-lnm4h\") pod \"dnsmasq-dns-86db49b7ff-d4cq8\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.886014 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b55mj"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.906744 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958078 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-config\") pod \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") "
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958218 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pd8d\" (UniqueName: \"kubernetes.io/projected/de387822-f6b5-43cc-80f4-2ba23cf2e43f-kube-api-access-2pd8d\") pod \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") "
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958331 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-dns-svc\") pod \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\" (UID: \"de387822-f6b5-43cc-80f4-2ba23cf2e43f\") "
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958585 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958685 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b50077e-96c6-4908-b3bd-5efa65b83fff-config\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958729 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958764 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw7kn\" (UniqueName: \"kubernetes.io/projected/3b50077e-96c6-4908-b3bd-5efa65b83fff-kube-api-access-tw7kn\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958796 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b50077e-96c6-4908-b3bd-5efa65b83fff-scripts\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958839 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3b50077e-96c6-4908-b3bd-5efa65b83fff-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.958901 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.962243 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3b50077e-96c6-4908-b3bd-5efa65b83fff-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.964255 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.965451 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b50077e-96c6-4908-b3bd-5efa65b83fff-scripts\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.965645 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de387822-f6b5-43cc-80f4-2ba23cf2e43f-kube-api-access-2pd8d" (OuterVolumeSpecName: "kube-api-access-2pd8d") pod "de387822-f6b5-43cc-80f4-2ba23cf2e43f" (UID: "de387822-f6b5-43cc-80f4-2ba23cf2e43f"). InnerVolumeSpecName "kube-api-access-2pd8d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.975046 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.975210 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b50077e-96c6-4908-b3bd-5efa65b83fff-config\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.975943 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b50077e-96c6-4908-b3bd-5efa65b83fff-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.990042 4689 generic.go:334] "Generic (PLEG): container finished" podID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerID="bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120" exitCode=0
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.990087 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-b55mj"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.990126 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" event={"ID":"de387822-f6b5-43cc-80f4-2ba23cf2e43f","Type":"ContainerDied","Data":"bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120"}
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.990182 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-b55mj" event={"ID":"de387822-f6b5-43cc-80f4-2ba23cf2e43f","Type":"ContainerDied","Data":"d57d1b84a6251caa0d13151e5a64b1d916a6b19cebb0744812885dc5d5e43e78"}
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.990201 4689 scope.go:117] "RemoveContainer" containerID="bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120"
Oct 13 21:25:54 crc kubenswrapper[4689]: I1013 21:25:54.990441 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw7kn\" (UniqueName: \"kubernetes.io/projected/3b50077e-96c6-4908-b3bd-5efa65b83fff-kube-api-access-tw7kn\") pod \"ovn-northd-0\" (UID: \"3b50077e-96c6-4908-b3bd-5efa65b83fff\") " pod="openstack/ovn-northd-0"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.001167 4689 generic.go:334] "Generic (PLEG): container finished" podID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerID="e2991706dab0f218c7f4d16a982e010731cbe2b3e5c200b368e6e08e82bf7d4d" exitCode=0
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.006842 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" event={"ID":"84a133e7-f634-4caf-988b-f7c3f0176e52","Type":"ContainerDied","Data":"e2991706dab0f218c7f4d16a982e010731cbe2b3e5c200b368e6e08e82bf7d4d"}
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.024671 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-config" (OuterVolumeSpecName: "config") pod "de387822-f6b5-43cc-80f4-2ba23cf2e43f" (UID: "de387822-f6b5-43cc-80f4-2ba23cf2e43f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.027724 4689 scope.go:117] "RemoveContainer" containerID="bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.060500 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-config\") on node \"crc\" DevicePath \"\""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.060534 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pd8d\" (UniqueName: \"kubernetes.io/projected/de387822-f6b5-43cc-80f4-2ba23cf2e43f-kube-api-access-2pd8d\") on node \"crc\" DevicePath \"\""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.063279 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "de387822-f6b5-43cc-80f4-2ba23cf2e43f" (UID: "de387822-f6b5-43cc-80f4-2ba23cf2e43f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.070811 4689 scope.go:117] "RemoveContainer" containerID="bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120"
Oct 13 21:25:55 crc kubenswrapper[4689]: E1013 21:25:55.071288 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120\": container with ID starting with bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120 not found: ID does not exist" containerID="bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.071315 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120"} err="failed to get container status \"bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120\": rpc error: code = NotFound desc = could not find container \"bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120\": container with ID starting with bd2d84da71f7d881541e643af042bfddd01d9057f241d7220ababbbd5027a120 not found: ID does not exist"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.071338 4689 scope.go:117] "RemoveContainer" containerID="bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61"
Oct 13 21:25:55 crc kubenswrapper[4689]: E1013 21:25:55.071693 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61\": container with ID starting with bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61 not found: ID does not exist" containerID="bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.071716 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61"} err="failed to get container status \"bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61\": rpc error: code = NotFound desc = could not find container \"bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61\": container with ID starting with bd19268fafdc961dcf46e4611316527a4e3e9376cdc7621780572ba51aa4ac61 not found: ID does not exist"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.097770 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.111006 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.161879 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/de387822-f6b5-43cc-80f4-2ba23cf2e43f-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.171786 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.329295 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b55mj"]
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.338052 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-b55mj"]
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.364500 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-config\") pod \"84a133e7-f634-4caf-988b-f7c3f0176e52\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") "
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.364558 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-dns-svc\") pod \"84a133e7-f634-4caf-988b-f7c3f0176e52\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") "
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.364650 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgz5x\" (UniqueName: \"kubernetes.io/projected/84a133e7-f634-4caf-988b-f7c3f0176e52-kube-api-access-sgz5x\") pod \"84a133e7-f634-4caf-988b-f7c3f0176e52\" (UID: \"84a133e7-f634-4caf-988b-f7c3f0176e52\") "
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.367753 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-k9l2j"]
Oct 13 21:25:55 crc kubenswrapper[4689]: W1013 21:25:55.372478 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb3f670f_4095_43d5_8376_b903cf972fc6.slice/crio-e4be64685d102e1b079dd7dd1823dcf5d90e5c2189bbcceb529fe2c80ed879b2 WatchSource:0}: Error finding container e4be64685d102e1b079dd7dd1823dcf5d90e5c2189bbcceb529fe2c80ed879b2: Status 404 returned error can't find the container with id e4be64685d102e1b079dd7dd1823dcf5d90e5c2189bbcceb529fe2c80ed879b2
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.376856 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a133e7-f634-4caf-988b-f7c3f0176e52-kube-api-access-sgz5x" (OuterVolumeSpecName: "kube-api-access-sgz5x") pod "84a133e7-f634-4caf-988b-f7c3f0176e52" (UID: "84a133e7-f634-4caf-988b-f7c3f0176e52"). InnerVolumeSpecName "kube-api-access-sgz5x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.408058 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-config" (OuterVolumeSpecName: "config") pod "84a133e7-f634-4caf-988b-f7c3f0176e52" (UID: "84a133e7-f634-4caf-988b-f7c3f0176e52"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.415205 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "84a133e7-f634-4caf-988b-f7c3f0176e52" (UID: "84a133e7-f634-4caf-988b-f7c3f0176e52"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:25:55 crc kubenswrapper[4689]: E1013 21:25:55.445777 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde387822_f6b5_43cc_80f4_2ba23cf2e43f.slice/crio-d57d1b84a6251caa0d13151e5a64b1d916a6b19cebb0744812885dc5d5e43e78\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde387822_f6b5_43cc_80f4_2ba23cf2e43f.slice\": RecentStats: unable to find data in memory cache]"
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.472268 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-config\") on node \"crc\" DevicePath \"\""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.472319 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84a133e7-f634-4caf-988b-f7c3f0176e52-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.472334 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgz5x\" (UniqueName: \"kubernetes.io/projected/84a133e7-f634-4caf-988b-f7c3f0176e52-kube-api-access-sgz5x\") on node \"crc\" DevicePath \"\""
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.478272 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-x69m8"]
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.565914 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-d4cq8"]
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.627658 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 13 21:25:55 crc kubenswrapper[4689]: W1013 21:25:55.646738 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b50077e_96c6_4908_b3bd_5efa65b83fff.slice/crio-da58d0a78cd9b50f5593abe934338eb414b55880298115544c29fc8a17b8973c WatchSource:0}: Error finding container da58d0a78cd9b50f5593abe934338eb414b55880298115544c29fc8a17b8973c: Status 404 returned error can't find the container with id da58d0a78cd9b50f5593abe934338eb414b55880298115544c29fc8a17b8973c
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.654092 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 13 21:25:55 crc kubenswrapper[4689]: I1013 21:25:55.877360 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" path="/var/lib/kubelet/pods/de387822-f6b5-43cc-80f4-2ba23cf2e43f/volumes"
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.015704 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3b50077e-96c6-4908-b3bd-5efa65b83fff","Type":"ContainerStarted","Data":"da58d0a78cd9b50f5593abe934338eb414b55880298115544c29fc8a17b8973c"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.025014 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7" event={"ID":"84a133e7-f634-4caf-988b-f7c3f0176e52","Type":"ContainerDied","Data":"07e5c45a340e6afbf3a2e8c653da1c706fdf985ed91102b9b563705f082cb795"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.025046 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g24w7"
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.025075 4689 scope.go:117] "RemoveContainer" containerID="e2991706dab0f218c7f4d16a982e010731cbe2b3e5c200b368e6e08e82bf7d4d"
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.031189 4689 generic.go:334] "Generic (PLEG): container finished" podID="72978894-08d4-40e7-ab23-9d7325ced36d" containerID="7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77" exitCode=0
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.031266 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" event={"ID":"72978894-08d4-40e7-ab23-9d7325ced36d","Type":"ContainerDied","Data":"7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.031304 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" event={"ID":"72978894-08d4-40e7-ab23-9d7325ced36d","Type":"ContainerStarted","Data":"0af2ead8782ede722a5d37a20c44886798f2470d8621b9e198566a0d701e73d9"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.034127 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x69m8" event={"ID":"56a091d6-e531-4956-b5aa-15f43a9c1038","Type":"ContainerStarted","Data":"ebfdb789c100c3f16089a3043d263f993b15e68aeebd1b536d5f1cecb056ec04"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.034171 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x69m8" event={"ID":"56a091d6-e531-4956-b5aa-15f43a9c1038","Type":"ContainerStarted","Data":"b3d31db09d71e674cbbfc2c1ef47776734ac80cdf8cd6c61c1ae4caa19623821"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.036371 4689 generic.go:334] "Generic (PLEG): container finished" podID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerID="4639c84d0f1cf3c673af1939ed8f737799bc7349523d7e0574749a863aa24673" exitCode=0
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.036451 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" event={"ID":"eb3f670f-4095-43d5-8376-b903cf972fc6","Type":"ContainerDied","Data":"4639c84d0f1cf3c673af1939ed8f737799bc7349523d7e0574749a863aa24673"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.036519 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" event={"ID":"eb3f670f-4095-43d5-8376-b903cf972fc6","Type":"ContainerStarted","Data":"e4be64685d102e1b079dd7dd1823dcf5d90e5c2189bbcceb529fe2c80ed879b2"}
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.048164 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g24w7"]
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.050303 4689 scope.go:117] "RemoveContainer" containerID="bf51f2e2df262018853dc04b1dd67f48cee05ab3f3184ee75b3296eef3c5238c"
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.062515 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g24w7"]
Oct 13 21:25:56 crc kubenswrapper[4689]: I1013 21:25:56.104220 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-x69m8" podStartSLOduration=2.104194635 podStartE2EDuration="2.104194635s" podCreationTimestamp="2025-10-13 21:25:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:25:56.089294133 +0000 UTC m=+873.007539218" watchObservedRunningTime="2025-10-13 21:25:56.104194635 +0000 UTC m=+873.022439720"
Oct 13 21:25:57 crc kubenswrapper[4689]: I1013 21:25:57.044075 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" event={"ID":"eb3f670f-4095-43d5-8376-b903cf972fc6","Type":"ContainerStarted","Data":"7ad3d8f780e8d10d13b1110ea0a126a5f80b13666964b52d91605ca677c93764"}
Oct 13 21:25:57 crc kubenswrapper[4689]: I1013 21:25:57.044506 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:25:57 crc kubenswrapper[4689]: I1013 21:25:57.047004 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" event={"ID":"72978894-08d4-40e7-ab23-9d7325ced36d","Type":"ContainerStarted","Data":"0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053"}
Oct 13 21:25:57 crc kubenswrapper[4689]: I1013 21:25:57.047287 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8"
Oct 13 21:25:57 crc kubenswrapper[4689]: I1013 21:25:57.067562 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" podStartSLOduration=3.067544526 podStartE2EDuration="3.067544526s" podCreationTimestamp="2025-10-13 21:25:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:25:57.062265861 +0000 UTC m=+873.980510956" watchObservedRunningTime="2025-10-13 21:25:57.067544526 +0000 UTC m=+873.985789611"
Oct 13 21:25:57 crc kubenswrapper[4689]: I1013 21:25:57.079761 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" podStartSLOduration=3.079744604 podStartE2EDuration="3.079744604s" podCreationTimestamp="2025-10-13 21:25:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:25:57.077114672 +0000 UTC m=+873.995359757" watchObservedRunningTime="2025-10-13 21:25:57.079744604 +0000 UTC m=+873.997989689"
Oct 13 21:25:57 crc kubenswrapper[4689]: I1013 21:25:57.884073 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a133e7-f634-4caf-988b-f7c3f0176e52" path="/var/lib/kubelet/pods/84a133e7-f634-4caf-988b-f7c3f0176e52/volumes"
Oct 13 21:25:58 crc kubenswrapper[4689]: I1013 21:25:58.058182 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3b50077e-96c6-4908-b3bd-5efa65b83fff","Type":"ContainerStarted","Data":"dc46cd8206aadf0c96d3d3a91b596977783f1534afcb71203efe75316a7f2704"}
Oct 13 21:25:58 crc kubenswrapper[4689]: I1013 21:25:58.058564 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"3b50077e-96c6-4908-b3bd-5efa65b83fff","Type":"ContainerStarted","Data":"01758a64148d63597e8484f2243563706562bdb037ecde5be433c2f98c9401ca"}
Oct 13 21:25:58 crc kubenswrapper[4689]: I1013 21:25:58.058832 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Oct 13 21:25:58 crc kubenswrapper[4689]: I1013 21:25:58.078804 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.85094625 podStartE2EDuration="4.078783336s" podCreationTimestamp="2025-10-13 21:25:54 +0000 UTC" firstStartedPulling="2025-10-13 21:25:55.653860978 +0000 UTC m=+872.572106053" lastFinishedPulling="2025-10-13 21:25:56.881698054 +0000 UTC m=+873.799943139" observedRunningTime="2025-10-13 21:25:58.078241814 +0000 UTC m=+874.996486979" watchObservedRunningTime="2025-10-13 21:25:58.078783336 +0000 UTC m=+874.997028421"
Oct 13 21:25:58 crc kubenswrapper[4689]: I1013 21:25:58.143295 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Oct 13 21:25:58 crc kubenswrapper[4689]: I1013 21:25:58.143363 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Oct 13 21:25:58 crc kubenswrapper[4689]: I1013 21:25:58.220769 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.126523 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.576169 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-vg98r"]
Oct 13 21:25:59 crc kubenswrapper[4689]: E1013 21:25:59.576879 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerName="init"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.576897 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerName="init"
Oct 13 21:25:59 crc kubenswrapper[4689]: E1013 21:25:59.576914 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerName="dnsmasq-dns"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.576923 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerName="dnsmasq-dns"
Oct 13 21:25:59 crc kubenswrapper[4689]: E1013 21:25:59.576939 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerName="dnsmasq-dns"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.576947 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerName="dnsmasq-dns"
Oct 13 21:25:59 crc kubenswrapper[4689]: E1013 21:25:59.576967 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerName="init"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.576974 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerName="init"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.577147 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a133e7-f634-4caf-988b-f7c3f0176e52" containerName="dnsmasq-dns"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.577159 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="de387822-f6b5-43cc-80f4-2ba23cf2e43f" containerName="dnsmasq-dns"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.577863 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vg98r"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.590755 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vg98r"]
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.644541 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnwh5\" (UniqueName: \"kubernetes.io/projected/9eac6513-46b3-4591-bf02-7a5cd0200e96-kube-api-access-lnwh5\") pod \"keystone-db-create-vg98r\" (UID: \"9eac6513-46b3-4591-bf02-7a5cd0200e96\") " pod="openstack/keystone-db-create-vg98r"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.746460 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnwh5\" (UniqueName: \"kubernetes.io/projected/9eac6513-46b3-4591-bf02-7a5cd0200e96-kube-api-access-lnwh5\") pod \"keystone-db-create-vg98r\" (UID: \"9eac6513-46b3-4591-bf02-7a5cd0200e96\") " pod="openstack/keystone-db-create-vg98r"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.766857 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnwh5\" (UniqueName: \"kubernetes.io/projected/9eac6513-46b3-4591-bf02-7a5cd0200e96-kube-api-access-lnwh5\") pod \"keystone-db-create-vg98r\" (UID: \"9eac6513-46b3-4591-bf02-7a5cd0200e96\") " pod="openstack/keystone-db-create-vg98r"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.800126 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.800187 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.861911 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.908876 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-229q9"]
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.910533 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vg98r"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.910640 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-229q9"
Oct 13 21:25:59 crc kubenswrapper[4689]: I1013 21:25:59.925239 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-229q9"]
Oct 13 21:26:00 crc kubenswrapper[4689]: I1013 21:26:00.051723 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssb2x\" (UniqueName: \"kubernetes.io/projected/d7f7455f-653a-4658-8864-e322e3b1fcb2-kube-api-access-ssb2x\") pod \"placement-db-create-229q9\" (UID: \"d7f7455f-653a-4658-8864-e322e3b1fcb2\") " pod="openstack/placement-db-create-229q9"
Oct 13 21:26:00 crc kubenswrapper[4689]: I1013 21:26:00.127183 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Oct 13 21:26:00 crc kubenswrapper[4689]: I1013 21:26:00.153485 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssb2x\" (UniqueName: \"kubernetes.io/projected/d7f7455f-653a-4658-8864-e322e3b1fcb2-kube-api-access-ssb2x\") pod \"placement-db-create-229q9\" (UID: \"d7f7455f-653a-4658-8864-e322e3b1fcb2\") " pod="openstack/placement-db-create-229q9"
Oct 13 21:26:00 crc kubenswrapper[4689]: I1013 21:26:00.173785 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssb2x\" (UniqueName: \"kubernetes.io/projected/d7f7455f-653a-4658-8864-e322e3b1fcb2-kube-api-access-ssb2x\") pod \"placement-db-create-229q9\" (UID: \"d7f7455f-653a-4658-8864-e322e3b1fcb2\") " pod="openstack/placement-db-create-229q9"
Oct 13 21:26:00 crc kubenswrapper[4689]: I1013 21:26:00.232921 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-229q9"
Oct 13 21:26:00 crc kubenswrapper[4689]: I1013 21:26:00.342543 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vg98r"]
Oct 13 21:26:00 crc kubenswrapper[4689]: I1013 21:26:00.732888 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-229q9"]
Oct 13 21:26:00 crc kubenswrapper[4689]: W1013 21:26:00.745255 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7f7455f_653a_4658_8864_e322e3b1fcb2.slice/crio-0fa60ddec8161a706cffb5cb0e27e7acb1447b64a73233e6ee99cf6cc55b0db7 WatchSource:0}: Error finding container 0fa60ddec8161a706cffb5cb0e27e7acb1447b64a73233e6ee99cf6cc55b0db7: Status 404 returned error can't find the container with id 0fa60ddec8161a706cffb5cb0e27e7acb1447b64a73233e6ee99cf6cc55b0db7
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.091359 4689 generic.go:334] "Generic (PLEG): container finished" podID="d7f7455f-653a-4658-8864-e322e3b1fcb2" containerID="cfc09172221453f7f745ee5d9273c38cff8d9e942d4bef5b6725f2c6f221f24e" exitCode=0
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.091406 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-229q9" event={"ID":"d7f7455f-653a-4658-8864-e322e3b1fcb2","Type":"ContainerDied","Data":"cfc09172221453f7f745ee5d9273c38cff8d9e942d4bef5b6725f2c6f221f24e"}
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.091451 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-229q9" event={"ID":"d7f7455f-653a-4658-8864-e322e3b1fcb2","Type":"ContainerStarted","Data":"0fa60ddec8161a706cffb5cb0e27e7acb1447b64a73233e6ee99cf6cc55b0db7"}
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.096975 4689 generic.go:334] "Generic (PLEG): container finished" podID="9eac6513-46b3-4591-bf02-7a5cd0200e96" containerID="fdd59ac39d2c91bb1af62d5eb898feba32fd0e8c6835d58f0b7d7ecc38decd62" exitCode=0
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.097023 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vg98r" event={"ID":"9eac6513-46b3-4591-bf02-7a5cd0200e96","Type":"ContainerDied","Data":"fdd59ac39d2c91bb1af62d5eb898feba32fd0e8c6835d58f0b7d7ecc38decd62"}
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.097050 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vg98r" event={"ID":"9eac6513-46b3-4591-bf02-7a5cd0200e96","Type":"ContainerStarted","Data":"4183f52d18844f0d456c99e1097bbe5d330cdfdc175daefc2c80fe8da420f729"}
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.666524 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.731532 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-k9l2j"]
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.731784 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" podUID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerName="dnsmasq-dns" containerID="cri-o://7ad3d8f780e8d10d13b1110ea0a126a5f80b13666964b52d91605ca677c93764" gracePeriod=10
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.732713 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.775142 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-lwszp"]
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.776489 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.791284 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lwszp"]
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.884364 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.884489 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt527\" (UniqueName: \"kubernetes.io/projected/8172cb34-22e4-4710-a9b2-94cb60495469-kube-api-access-dt527\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.884552 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-dns-svc\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.884644 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.884725 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-config\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.985862 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.986058 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt527\" (UniqueName: \"kubernetes.io/projected/8172cb34-22e4-4710-a9b2-94cb60495469-kube-api-access-dt527\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.986177 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-dns-svc\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.986243 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.986360 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-config\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.987396 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-config\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.987412 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.987464 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-dns-svc\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:01 crc kubenswrapper[4689]: I1013 21:26:01.988259 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.005564 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt527\" (UniqueName: \"kubernetes.io/projected/8172cb34-22e4-4710-a9b2-94cb60495469-kube-api-access-dt527\") pod \"dnsmasq-dns-698758b865-lwszp\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.111448 4689 generic.go:334] "Generic (PLEG): container finished" podID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerID="7ad3d8f780e8d10d13b1110ea0a126a5f80b13666964b52d91605ca677c93764" exitCode=0
Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.111842 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" event={"ID":"eb3f670f-4095-43d5-8376-b903cf972fc6","Type":"ContainerDied","Data":"7ad3d8f780e8d10d13b1110ea0a126a5f80b13666964b52d91605ca677c93764"}
Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.112447 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lwszp"
Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.239952 4689 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.290721 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-ovsdbserver-sb\") pod \"eb3f670f-4095-43d5-8376-b903cf972fc6\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.291538 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtb2s\" (UniqueName: \"kubernetes.io/projected/eb3f670f-4095-43d5-8376-b903cf972fc6-kube-api-access-dtb2s\") pod \"eb3f670f-4095-43d5-8376-b903cf972fc6\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.291788 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-dns-svc\") pod \"eb3f670f-4095-43d5-8376-b903cf972fc6\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.291818 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-config\") pod \"eb3f670f-4095-43d5-8376-b903cf972fc6\" (UID: \"eb3f670f-4095-43d5-8376-b903cf972fc6\") " Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.301202 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb3f670f-4095-43d5-8376-b903cf972fc6-kube-api-access-dtb2s" (OuterVolumeSpecName: "kube-api-access-dtb2s") pod "eb3f670f-4095-43d5-8376-b903cf972fc6" (UID: "eb3f670f-4095-43d5-8376-b903cf972fc6"). InnerVolumeSpecName "kube-api-access-dtb2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.334960 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-config" (OuterVolumeSpecName: "config") pod "eb3f670f-4095-43d5-8376-b903cf972fc6" (UID: "eb3f670f-4095-43d5-8376-b903cf972fc6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.340046 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eb3f670f-4095-43d5-8376-b903cf972fc6" (UID: "eb3f670f-4095-43d5-8376-b903cf972fc6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.350507 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eb3f670f-4095-43d5-8376-b903cf972fc6" (UID: "eb3f670f-4095-43d5-8376-b903cf972fc6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.394797 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtb2s\" (UniqueName: \"kubernetes.io/projected/eb3f670f-4095-43d5-8376-b903cf972fc6-kube-api-access-dtb2s\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.394845 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.394858 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.394871 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb3f670f-4095-43d5-8376-b903cf972fc6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.551163 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-229q9" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.594603 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vg98r" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.598355 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssb2x\" (UniqueName: \"kubernetes.io/projected/d7f7455f-653a-4658-8864-e322e3b1fcb2-kube-api-access-ssb2x\") pod \"d7f7455f-653a-4658-8864-e322e3b1fcb2\" (UID: \"d7f7455f-653a-4658-8864-e322e3b1fcb2\") " Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.602759 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7f7455f-653a-4658-8864-e322e3b1fcb2-kube-api-access-ssb2x" (OuterVolumeSpecName: "kube-api-access-ssb2x") pod "d7f7455f-653a-4658-8864-e322e3b1fcb2" (UID: "d7f7455f-653a-4658-8864-e322e3b1fcb2"). InnerVolumeSpecName "kube-api-access-ssb2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.699955 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnwh5\" (UniqueName: \"kubernetes.io/projected/9eac6513-46b3-4591-bf02-7a5cd0200e96-kube-api-access-lnwh5\") pod \"9eac6513-46b3-4591-bf02-7a5cd0200e96\" (UID: \"9eac6513-46b3-4591-bf02-7a5cd0200e96\") " Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.700430 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssb2x\" (UniqueName: \"kubernetes.io/projected/d7f7455f-653a-4658-8864-e322e3b1fcb2-kube-api-access-ssb2x\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.703824 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eac6513-46b3-4591-bf02-7a5cd0200e96-kube-api-access-lnwh5" (OuterVolumeSpecName: "kube-api-access-lnwh5") pod "9eac6513-46b3-4591-bf02-7a5cd0200e96" (UID: "9eac6513-46b3-4591-bf02-7a5cd0200e96"). InnerVolumeSpecName "kube-api-access-lnwh5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.759997 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lwszp"] Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.801969 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnwh5\" (UniqueName: \"kubernetes.io/projected/9eac6513-46b3-4591-bf02-7a5cd0200e96-kube-api-access-lnwh5\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920157 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 13 21:26:02 crc kubenswrapper[4689]: E1013 21:26:02.920557 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerName="dnsmasq-dns" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920581 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerName="dnsmasq-dns" Oct 13 21:26:02 crc kubenswrapper[4689]: E1013 21:26:02.920640 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eac6513-46b3-4591-bf02-7a5cd0200e96" containerName="mariadb-database-create" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920649 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eac6513-46b3-4591-bf02-7a5cd0200e96" containerName="mariadb-database-create" Oct 13 21:26:02 crc kubenswrapper[4689]: E1013 21:26:02.920677 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f7455f-653a-4658-8864-e322e3b1fcb2" containerName="mariadb-database-create" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920688 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f7455f-653a-4658-8864-e322e3b1fcb2" containerName="mariadb-database-create" Oct 13 21:26:02 crc kubenswrapper[4689]: E1013 21:26:02.920709 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerName="init" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920718 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerName="init" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920918 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb3f670f-4095-43d5-8376-b903cf972fc6" containerName="dnsmasq-dns" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920942 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f7455f-653a-4658-8864-e322e3b1fcb2" containerName="mariadb-database-create" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.920955 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eac6513-46b3-4591-bf02-7a5cd0200e96" containerName="mariadb-database-create" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.926943 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.928943 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.929102 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.929201 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.929391 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-x6hb9" Oct 13 21:26:02 crc kubenswrapper[4689]: I1013 21:26:02.946507 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.006519 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-lock\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.006933 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-cache\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.007071 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj4w5\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-kube-api-access-hj4w5\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.007201 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.007334 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.109067 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-lock\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.109142 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-cache\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.109182 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-hj4w5\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-kube-api-access-hj4w5\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.109231 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.109270 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.109701 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: E1013 21:26:03.109954 4689 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 21:26:03 crc kubenswrapper[4689]: E1013 21:26:03.109996 4689 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 21:26:03 crc kubenswrapper[4689]: E1013 21:26:03.110067 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift podName:d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5 nodeName:}" failed. No retries permitted until 2025-10-13 21:26:03.610037872 +0000 UTC m=+880.528282977 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift") pod "swift-storage-0" (UID: "d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5") : configmap "swift-ring-files" not found Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.110496 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-cache\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.110574 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-lock\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.121857 4689 generic.go:334] "Generic (PLEG): container finished" podID="8172cb34-22e4-4710-a9b2-94cb60495469" containerID="6fde0c4e5f8bec9732978d15cfba23d3a4095bc8633a02dde99f73ced0051fd0" exitCode=0 Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.121943 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lwszp" event={"ID":"8172cb34-22e4-4710-a9b2-94cb60495469","Type":"ContainerDied","Data":"6fde0c4e5f8bec9732978d15cfba23d3a4095bc8633a02dde99f73ced0051fd0"} Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.122222 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lwszp" event={"ID":"8172cb34-22e4-4710-a9b2-94cb60495469","Type":"ContainerStarted","Data":"0e16f1edf8666a4e36ea5d36a7523406ed9967555dd8561ac4e1f0fc1e6ad710"} Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.123614 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vg98r" event={"ID":"9eac6513-46b3-4591-bf02-7a5cd0200e96","Type":"ContainerDied","Data":"4183f52d18844f0d456c99e1097bbe5d330cdfdc175daefc2c80fe8da420f729"} Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.123643 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vg98r" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.123648 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4183f52d18844f0d456c99e1097bbe5d330cdfdc175daefc2c80fe8da420f729" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.127321 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.127494 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-k9l2j" event={"ID":"eb3f670f-4095-43d5-8376-b903cf972fc6","Type":"ContainerDied","Data":"e4be64685d102e1b079dd7dd1823dcf5d90e5c2189bbcceb529fe2c80ed879b2"} Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.127630 4689 scope.go:117] "RemoveContainer" containerID="7ad3d8f780e8d10d13b1110ea0a126a5f80b13666964b52d91605ca677c93764" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.137200 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-229q9" event={"ID":"d7f7455f-653a-4658-8864-e322e3b1fcb2","Type":"ContainerDied","Data":"0fa60ddec8161a706cffb5cb0e27e7acb1447b64a73233e6ee99cf6cc55b0db7"} Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.137407 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fa60ddec8161a706cffb5cb0e27e7acb1447b64a73233e6ee99cf6cc55b0db7" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.137536 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-229q9" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.136975 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.177462 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj4w5\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-kube-api-access-hj4w5\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.243132 4689 scope.go:117] "RemoveContainer" containerID="4639c84d0f1cf3c673af1939ed8f737799bc7349523d7e0574749a863aa24673" Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.263240 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-k9l2j"] Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.281139 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-k9l2j"] Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.617274 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:03 crc kubenswrapper[4689]: E1013 21:26:03.617467 4689 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 21:26:03 crc kubenswrapper[4689]: E1013 21:26:03.617730 4689 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 21:26:03 crc kubenswrapper[4689]: E1013 21:26:03.617817 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift podName:d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5 nodeName:}" failed. 
No retries permitted until 2025-10-13 21:26:04.617788191 +0000 UTC m=+881.536033276 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift") pod "swift-storage-0" (UID: "d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5") : configmap "swift-ring-files" not found Oct 13 21:26:03 crc kubenswrapper[4689]: I1013 21:26:03.881721 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb3f670f-4095-43d5-8376-b903cf972fc6" path="/var/lib/kubelet/pods/eb3f670f-4095-43d5-8376-b903cf972fc6/volumes" Oct 13 21:26:04 crc kubenswrapper[4689]: I1013 21:26:04.147373 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lwszp" event={"ID":"8172cb34-22e4-4710-a9b2-94cb60495469","Type":"ContainerStarted","Data":"831d1039236b73c796e9c9e1839ed94aa2f812b31839892e5ee3c690792476a7"} Oct 13 21:26:04 crc kubenswrapper[4689]: I1013 21:26:04.148106 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-lwszp" Oct 13 21:26:04 crc kubenswrapper[4689]: I1013 21:26:04.167747 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-lwszp" podStartSLOduration=3.167728527 podStartE2EDuration="3.167728527s" podCreationTimestamp="2025-10-13 21:26:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:26:04.163412465 +0000 UTC m=+881.081657560" watchObservedRunningTime="2025-10-13 21:26:04.167728527 +0000 UTC m=+881.085973612" Oct 13 21:26:04 crc kubenswrapper[4689]: I1013 21:26:04.633135 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:04 crc kubenswrapper[4689]: E1013 21:26:04.633307 4689 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 21:26:04 crc kubenswrapper[4689]: E1013 21:26:04.633340 4689 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 21:26:04 crc kubenswrapper[4689]: E1013 21:26:04.633394 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift podName:d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5 nodeName:}" failed. No retries permitted until 2025-10-13 21:26:06.633376385 +0000 UTC m=+883.551621470 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift") pod "swift-storage-0" (UID: "d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5") : configmap "swift-ring-files" not found Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.079677 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-fjttp"] Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.081810 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-fjttp" Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.088686 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-fjttp"] Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.099877 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.141198 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbwmw\" (UniqueName: \"kubernetes.io/projected/7a4aaec5-dc5e-4500-9aab-16a83f4d60f6-kube-api-access-vbwmw\") pod \"glance-db-create-fjttp\" (UID: \"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6\") " pod="openstack/glance-db-create-fjttp" Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.243238 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbwmw\" (UniqueName: \"kubernetes.io/projected/7a4aaec5-dc5e-4500-9aab-16a83f4d60f6-kube-api-access-vbwmw\") pod \"glance-db-create-fjttp\" (UID: \"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6\") " pod="openstack/glance-db-create-fjttp" Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.262431 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbwmw\" (UniqueName: \"kubernetes.io/projected/7a4aaec5-dc5e-4500-9aab-16a83f4d60f6-kube-api-access-vbwmw\") pod \"glance-db-create-fjttp\" (UID: \"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6\") " pod="openstack/glance-db-create-fjttp" Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.404081 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-fjttp" Oct 13 21:26:05 crc kubenswrapper[4689]: I1013 21:26:05.845387 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-fjttp"] Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.164000 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-fjttp" event={"ID":"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6","Type":"ContainerStarted","Data":"c8c7084b223ce09fcf9b8879ceb60f83c486cdd401f3b1852e792a95626ad85e"} Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.164038 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-fjttp" event={"ID":"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6","Type":"ContainerStarted","Data":"71fc818888e65ca1d40b68ba06b8d4e83d441ea0bbf32bcb6b78de297d085c5c"} Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.183403 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-fjttp" podStartSLOduration=1.183386947 podStartE2EDuration="1.183386947s" podCreationTimestamp="2025-10-13 21:26:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:26:06.17968968 +0000 UTC m=+883.097934795" watchObservedRunningTime="2025-10-13 21:26:06.183386947 +0000 UTC m=+883.101632032" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.670572 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:06 crc kubenswrapper[4689]: E1013 21:26:06.670788 4689 projected.go:288] Couldn't get 
configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 21:26:06 crc kubenswrapper[4689]: E1013 21:26:06.670826 4689 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 21:26:06 crc kubenswrapper[4689]: E1013 21:26:06.670890 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift podName:d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5 nodeName:}" failed. No retries permitted until 2025-10-13 21:26:10.67087196 +0000 UTC m=+887.589117045 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift") pod "swift-storage-0" (UID: "d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5") : configmap "swift-ring-files" not found Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.828884 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-ms7xx"] Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.830500 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.833671 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.833772 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.834087 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.870164 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-ms7xx"] Oct 13 21:26:06 crc kubenswrapper[4689]: E1013 21:26:06.870952 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-8lrkq ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-ms7xx" podUID="a9058a3c-f47f-4f38-812a-0c2a6126725b" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.874147 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-ring-data-devices\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.874491 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-dispersionconf\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.874647 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-combined-ca-bundle\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 
21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.874769 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9058a3c-f47f-4f38-812a-0c2a6126725b-etc-swift\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.874932 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lrkq\" (UniqueName: \"kubernetes.io/projected/a9058a3c-f47f-4f38-812a-0c2a6126725b-kube-api-access-8lrkq\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.875083 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-swiftconf\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.875184 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-scripts\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.879846 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-8rq2n"] Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.881077 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.889299 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-ms7xx"] Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.897521 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-8rq2n"] Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.976283 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-swiftconf\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.976715 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-swiftconf\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.976938 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-scripts\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977054 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g2kn\" (UniqueName: \"kubernetes.io/projected/0817e909-9d71-4ddd-b3e7-49e41383b1da-kube-api-access-2g2kn\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977161 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-combined-ca-bundle\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977270 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-ring-data-devices\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977472 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-scripts\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977574 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-dispersionconf\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" 
Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977617 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-ring-data-devices\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977821 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-dispersionconf\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977857 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-combined-ca-bundle\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977885 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9058a3c-f47f-4f38-812a-0c2a6126725b-etc-swift\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977904 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lrkq\" (UniqueName: \"kubernetes.io/projected/a9058a3c-f47f-4f38-812a-0c2a6126725b-kube-api-access-8lrkq\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.977927 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0817e909-9d71-4ddd-b3e7-49e41383b1da-etc-swift\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.978459 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9058a3c-f47f-4f38-812a-0c2a6126725b-etc-swift\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.978675 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-ring-data-devices\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.979023 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-scripts\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.983550 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-dispersionconf\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.983743 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-combined-ca-bundle\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.983893 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-swiftconf\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:06 crc kubenswrapper[4689]: I1013 21:26:06.994004 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lrkq\" (UniqueName: \"kubernetes.io/projected/a9058a3c-f47f-4f38-812a-0c2a6126725b-kube-api-access-8lrkq\") pod \"swift-ring-rebalance-ms7xx\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.079684 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0817e909-9d71-4ddd-b3e7-49e41383b1da-etc-swift\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.079765 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-swiftconf\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.079866 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g2kn\" (UniqueName: \"kubernetes.io/projected/0817e909-9d71-4ddd-b3e7-49e41383b1da-kube-api-access-2g2kn\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.079897 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-combined-ca-bundle\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.079921 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-ring-data-devices\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.079960 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-scripts\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.080073 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-dispersionconf\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.081853 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0817e909-9d71-4ddd-b3e7-49e41383b1da-etc-swift\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.083442 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-ring-data-devices\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.085092 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-scripts\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.085430 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-swiftconf\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.086180 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-combined-ca-bundle\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.086552 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-dispersionconf\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.141063 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g2kn\" (UniqueName: \"kubernetes.io/projected/0817e909-9d71-4ddd-b3e7-49e41383b1da-kube-api-access-2g2kn\") pod \"swift-ring-rebalance-8rq2n\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") " pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.173874 4689 generic.go:334] "Generic (PLEG): container finished" podID="7a4aaec5-dc5e-4500-9aab-16a83f4d60f6" containerID="c8c7084b223ce09fcf9b8879ceb60f83c486cdd401f3b1852e792a95626ad85e" exitCode=0 Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.173948 4689 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.174504 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-fjttp" event={"ID":"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6","Type":"ContainerDied","Data":"c8c7084b223ce09fcf9b8879ceb60f83c486cdd401f3b1852e792a95626ad85e"} Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.186092 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.194971 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-8rq2n" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.282655 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-swiftconf\") pod \"a9058a3c-f47f-4f38-812a-0c2a6126725b\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.282735 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-ring-data-devices\") pod \"a9058a3c-f47f-4f38-812a-0c2a6126725b\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.282862 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9058a3c-f47f-4f38-812a-0c2a6126725b-etc-swift\") pod \"a9058a3c-f47f-4f38-812a-0c2a6126725b\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.282912 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lrkq\" (UniqueName: \"kubernetes.io/projected/a9058a3c-f47f-4f38-812a-0c2a6126725b-kube-api-access-8lrkq\") pod \"a9058a3c-f47f-4f38-812a-0c2a6126725b\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.282932 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-scripts\") pod \"a9058a3c-f47f-4f38-812a-0c2a6126725b\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.282958 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-dispersionconf\") pod \"a9058a3c-f47f-4f38-812a-0c2a6126725b\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.283013 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-combined-ca-bundle\") pod \"a9058a3c-f47f-4f38-812a-0c2a6126725b\" (UID: \"a9058a3c-f47f-4f38-812a-0c2a6126725b\") " Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.284056 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a9058a3c-f47f-4f38-812a-0c2a6126725b" (UID: 
"a9058a3c-f47f-4f38-812a-0c2a6126725b"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.284159 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-scripts" (OuterVolumeSpecName: "scripts") pod "a9058a3c-f47f-4f38-812a-0c2a6126725b" (UID: "a9058a3c-f47f-4f38-812a-0c2a6126725b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.284559 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9058a3c-f47f-4f38-812a-0c2a6126725b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a9058a3c-f47f-4f38-812a-0c2a6126725b" (UID: "a9058a3c-f47f-4f38-812a-0c2a6126725b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.286452 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a9058a3c-f47f-4f38-812a-0c2a6126725b" (UID: "a9058a3c-f47f-4f38-812a-0c2a6126725b"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.288058 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9058a3c-f47f-4f38-812a-0c2a6126725b" (UID: "a9058a3c-f47f-4f38-812a-0c2a6126725b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.288740 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a9058a3c-f47f-4f38-812a-0c2a6126725b" (UID: "a9058a3c-f47f-4f38-812a-0c2a6126725b"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.295073 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9058a3c-f47f-4f38-812a-0c2a6126725b-kube-api-access-8lrkq" (OuterVolumeSpecName: "kube-api-access-8lrkq") pod "a9058a3c-f47f-4f38-812a-0c2a6126725b" (UID: "a9058a3c-f47f-4f38-812a-0c2a6126725b"). InnerVolumeSpecName "kube-api-access-8lrkq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.386819 4689 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.387191 4689 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.387206 4689 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a9058a3c-f47f-4f38-812a-0c2a6126725b-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.387289 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lrkq\" (UniqueName: \"kubernetes.io/projected/a9058a3c-f47f-4f38-812a-0c2a6126725b-kube-api-access-8lrkq\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.387305 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9058a3c-f47f-4f38-812a-0c2a6126725b-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.387315 4689 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.387326 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9058a3c-f47f-4f38-812a-0c2a6126725b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:07 crc kubenswrapper[4689]: I1013 21:26:07.650279 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-8rq2n"] Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.187540 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-8rq2n" event={"ID":"0817e909-9d71-4ddd-b3e7-49e41383b1da","Type":"ContainerStarted","Data":"53676dd50d14d76cd4e024ad405702654a883141182a163ba1961adde74e39a4"} Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.187606 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-ms7xx" Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.229411 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-ms7xx"] Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.239940 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-ms7xx"] Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.572323 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-fjttp" Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.605458 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbwmw\" (UniqueName: \"kubernetes.io/projected/7a4aaec5-dc5e-4500-9aab-16a83f4d60f6-kube-api-access-vbwmw\") pod \"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6\" (UID: \"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6\") " Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.611997 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a4aaec5-dc5e-4500-9aab-16a83f4d60f6-kube-api-access-vbwmw" (OuterVolumeSpecName: "kube-api-access-vbwmw") pod "7a4aaec5-dc5e-4500-9aab-16a83f4d60f6" (UID: "7a4aaec5-dc5e-4500-9aab-16a83f4d60f6"). InnerVolumeSpecName "kube-api-access-vbwmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:08 crc kubenswrapper[4689]: I1013 21:26:08.707967 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbwmw\" (UniqueName: \"kubernetes.io/projected/7a4aaec5-dc5e-4500-9aab-16a83f4d60f6-kube-api-access-vbwmw\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.204410 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-fjttp" event={"ID":"7a4aaec5-dc5e-4500-9aab-16a83f4d60f6","Type":"ContainerDied","Data":"71fc818888e65ca1d40b68ba06b8d4e83d441ea0bbf32bcb6b78de297d085c5c"} Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.204470 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71fc818888e65ca1d40b68ba06b8d4e83d441ea0bbf32bcb6b78de297d085c5c" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.204442 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-fjttp" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.537773 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-004c-account-create-xpm4r"] Oct 13 21:26:09 crc kubenswrapper[4689]: E1013 21:26:09.538230 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a4aaec5-dc5e-4500-9aab-16a83f4d60f6" containerName="mariadb-database-create" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.538248 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a4aaec5-dc5e-4500-9aab-16a83f4d60f6" containerName="mariadb-database-create" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.538467 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a4aaec5-dc5e-4500-9aab-16a83f4d60f6" containerName="mariadb-database-create" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.539174 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-004c-account-create-xpm4r" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.541262 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.545411 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-004c-account-create-xpm4r"] Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.630264 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcr5j\" (UniqueName: \"kubernetes.io/projected/8ad8ad88-3a13-433c-b063-3303125e8ee6-kube-api-access-dcr5j\") pod \"keystone-004c-account-create-xpm4r\" (UID: \"8ad8ad88-3a13-433c-b063-3303125e8ee6\") " pod="openstack/keystone-004c-account-create-xpm4r" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.732889 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcr5j\" (UniqueName: \"kubernetes.io/projected/8ad8ad88-3a13-433c-b063-3303125e8ee6-kube-api-access-dcr5j\") pod \"keystone-004c-account-create-xpm4r\" (UID: \"8ad8ad88-3a13-433c-b063-3303125e8ee6\") " pod="openstack/keystone-004c-account-create-xpm4r" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.755085 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcr5j\" (UniqueName: \"kubernetes.io/projected/8ad8ad88-3a13-433c-b063-3303125e8ee6-kube-api-access-dcr5j\") pod \"keystone-004c-account-create-xpm4r\" (UID: \"8ad8ad88-3a13-433c-b063-3303125e8ee6\") " pod="openstack/keystone-004c-account-create-xpm4r" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.879704 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9058a3c-f47f-4f38-812a-0c2a6126725b" path="/var/lib/kubelet/pods/a9058a3c-f47f-4f38-812a-0c2a6126725b/volumes" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.883158 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-004c-account-create-xpm4r" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.931333 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-9cd0-account-create-kjc4q"] Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.933748 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-9cd0-account-create-kjc4q" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.936268 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 13 21:26:09 crc kubenswrapper[4689]: I1013 21:26:09.939237 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9cd0-account-create-kjc4q"] Oct 13 21:26:10 crc kubenswrapper[4689]: I1013 21:26:10.038693 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gg2m\" (UniqueName: \"kubernetes.io/projected/b828e6e8-5980-4eb9-a91d-8a0c503c5432-kube-api-access-2gg2m\") pod \"placement-9cd0-account-create-kjc4q\" (UID: \"b828e6e8-5980-4eb9-a91d-8a0c503c5432\") " pod="openstack/placement-9cd0-account-create-kjc4q" Oct 13 21:26:10 crc kubenswrapper[4689]: I1013 21:26:10.140806 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gg2m\" (UniqueName: \"kubernetes.io/projected/b828e6e8-5980-4eb9-a91d-8a0c503c5432-kube-api-access-2gg2m\") pod \"placement-9cd0-account-create-kjc4q\" (UID: \"b828e6e8-5980-4eb9-a91d-8a0c503c5432\") " pod="openstack/placement-9cd0-account-create-kjc4q" Oct 13 21:26:10 crc kubenswrapper[4689]: I1013 21:26:10.161061 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gg2m\" (UniqueName: \"kubernetes.io/projected/b828e6e8-5980-4eb9-a91d-8a0c503c5432-kube-api-access-2gg2m\") pod \"placement-9cd0-account-create-kjc4q\" (UID: \"b828e6e8-5980-4eb9-a91d-8a0c503c5432\") " pod="openstack/placement-9cd0-account-create-kjc4q" Oct 13 21:26:10 crc kubenswrapper[4689]: I1013 21:26:10.182433 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 13 21:26:10 crc kubenswrapper[4689]: I1013 21:26:10.266559 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9cd0-account-create-kjc4q" Oct 13 21:26:10 crc kubenswrapper[4689]: I1013 21:26:10.754872 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:10 crc kubenswrapper[4689]: E1013 21:26:10.755013 4689 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 21:26:10 crc kubenswrapper[4689]: E1013 21:26:10.755350 4689 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 13 21:26:10 crc kubenswrapper[4689]: E1013 21:26:10.755412 4689 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift podName:d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5 nodeName:}" failed. No retries permitted until 2025-10-13 21:26:18.755393274 +0000 UTC m=+895.673638359 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift") pod "swift-storage-0" (UID: "d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5") : configmap "swift-ring-files" not found Oct 13 21:26:11 crc kubenswrapper[4689]: I1013 21:26:11.829638 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-004c-account-create-xpm4r"] Oct 13 21:26:11 crc kubenswrapper[4689]: I1013 21:26:11.835838 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9cd0-account-create-kjc4q"] Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.114761 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-lwszp" Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.167271 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-d4cq8"] Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.169705 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" podUID="72978894-08d4-40e7-ab23-9d7325ced36d" containerName="dnsmasq-dns" containerID="cri-o://0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053" gracePeriod=10 Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.265718 4689 generic.go:334] "Generic (PLEG): container finished" podID="8ad8ad88-3a13-433c-b063-3303125e8ee6" containerID="06f380e8df47ef136a4c5c5352312515e5447013df5fbae4cdd3faaf70b28811" exitCode=0 Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.266064 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-004c-account-create-xpm4r" event={"ID":"8ad8ad88-3a13-433c-b063-3303125e8ee6","Type":"ContainerDied","Data":"06f380e8df47ef136a4c5c5352312515e5447013df5fbae4cdd3faaf70b28811"} Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.266103 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-004c-account-create-xpm4r" event={"ID":"8ad8ad88-3a13-433c-b063-3303125e8ee6","Type":"ContainerStarted","Data":"8f94ed745301830102afc659f2462a2088c1d9ade7aa136a8aa8e9d916aa16a7"} Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.297309 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-8rq2n" event={"ID":"0817e909-9d71-4ddd-b3e7-49e41383b1da","Type":"ContainerStarted","Data":"c65ccfa1154fb96274e13a06c6d0afd518ca0401847b2c52c576b7beac0c2ce3"} Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.323983 4689 generic.go:334] "Generic (PLEG): container finished" podID="b828e6e8-5980-4eb9-a91d-8a0c503c5432" containerID="cf16fed46dcda7a7f398f4bf5fb273fd64bf3193a6225923a439987c2be67084" exitCode=0 Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.324042 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9cd0-account-create-kjc4q" event={"ID":"b828e6e8-5980-4eb9-a91d-8a0c503c5432","Type":"ContainerDied","Data":"cf16fed46dcda7a7f398f4bf5fb273fd64bf3193a6225923a439987c2be67084"} Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.324072 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9cd0-account-create-kjc4q" event={"ID":"b828e6e8-5980-4eb9-a91d-8a0c503c5432","Type":"ContainerStarted","Data":"94083701bd1bfb826f2bee98c20bb06bb0729578855c559f482c9e12e15c1e6f"} Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.326514 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
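
[annotation] The "Observed pod startup duration" record above is internally consistent: podStartSLOduration appears to be the end-to-end startup time with the image-pull window subtracted, which can be checked from the record's own fields (monotonic m=+ offsets used for the pull window):

    podStartE2EDuration                       =   6.326499313 s
    lastFinishedPulling - firstStartedPulling = 888.315604834 - 884.575264774
                                              =   3.740340060 s
    6.326499313 - 3.740340060                 =   2.586159253 s  (= podStartSLOduration)
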
pod="openstack/swift-ring-rebalance-8rq2n" podStartSLOduration=2.586159253 podStartE2EDuration="6.326499313s" podCreationTimestamp="2025-10-13 21:26:06 +0000 UTC" firstStartedPulling="2025-10-13 21:26:07.657019689 +0000 UTC m=+884.575264774" lastFinishedPulling="2025-10-13 21:26:11.397359749 +0000 UTC m=+888.315604834" observedRunningTime="2025-10-13 21:26:12.324898046 +0000 UTC m=+889.243143131" watchObservedRunningTime="2025-10-13 21:26:12.326499313 +0000 UTC m=+889.244744398" Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.779369 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.911518 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-dns-svc\") pod \"72978894-08d4-40e7-ab23-9d7325ced36d\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.911670 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-config\") pod \"72978894-08d4-40e7-ab23-9d7325ced36d\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.911728 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-sb\") pod \"72978894-08d4-40e7-ab23-9d7325ced36d\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.911902 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnm4h\" (UniqueName: \"kubernetes.io/projected/72978894-08d4-40e7-ab23-9d7325ced36d-kube-api-access-lnm4h\") pod \"72978894-08d4-40e7-ab23-9d7325ced36d\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.911935 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-nb\") pod \"72978894-08d4-40e7-ab23-9d7325ced36d\" (UID: \"72978894-08d4-40e7-ab23-9d7325ced36d\") " Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.931064 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72978894-08d4-40e7-ab23-9d7325ced36d-kube-api-access-lnm4h" (OuterVolumeSpecName: "kube-api-access-lnm4h") pod "72978894-08d4-40e7-ab23-9d7325ced36d" (UID: "72978894-08d4-40e7-ab23-9d7325ced36d"). InnerVolumeSpecName "kube-api-access-lnm4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.954795 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "72978894-08d4-40e7-ab23-9d7325ced36d" (UID: "72978894-08d4-40e7-ab23-9d7325ced36d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.961386 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "72978894-08d4-40e7-ab23-9d7325ced36d" (UID: "72978894-08d4-40e7-ab23-9d7325ced36d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.962666 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-config" (OuterVolumeSpecName: "config") pod "72978894-08d4-40e7-ab23-9d7325ced36d" (UID: "72978894-08d4-40e7-ab23-9d7325ced36d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:12 crc kubenswrapper[4689]: I1013 21:26:12.978739 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "72978894-08d4-40e7-ab23-9d7325ced36d" (UID: "72978894-08d4-40e7-ab23-9d7325ced36d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.016263 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnm4h\" (UniqueName: \"kubernetes.io/projected/72978894-08d4-40e7-ab23-9d7325ced36d-kube-api-access-lnm4h\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.016303 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.016317 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.016328 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.016342 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/72978894-08d4-40e7-ab23-9d7325ced36d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.333229 4689 generic.go:334] "Generic (PLEG): container finished" podID="72978894-08d4-40e7-ab23-9d7325ced36d" containerID="0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053" exitCode=0 Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.334129 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" event={"ID":"72978894-08d4-40e7-ab23-9d7325ced36d","Type":"ContainerDied","Data":"0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053"} Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.334223 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" event={"ID":"72978894-08d4-40e7-ab23-9d7325ced36d","Type":"ContainerDied","Data":"0af2ead8782ede722a5d37a20c44886798f2470d8621b9e198566a0d701e73d9"} Oct 13 
21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.334261 4689 scope.go:117] "RemoveContainer" containerID="0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.334567 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-d4cq8" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.364077 4689 scope.go:117] "RemoveContainer" containerID="7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.386794 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-d4cq8"] Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.392784 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-d4cq8"] Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.421443 4689 scope.go:117] "RemoveContainer" containerID="0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053" Oct 13 21:26:13 crc kubenswrapper[4689]: E1013 21:26:13.423023 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053\": container with ID starting with 0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053 not found: ID does not exist" containerID="0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.423062 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053"} err="failed to get container status \"0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053\": rpc error: code = NotFound desc = could not find container \"0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053\": container with ID starting with 0bc3d08762b455262e47dac113507551da1771744c08af2319108fd6de43f053 not found: ID does not exist" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.423089 4689 scope.go:117] "RemoveContainer" containerID="7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77" Oct 13 21:26:13 crc kubenswrapper[4689]: E1013 21:26:13.423418 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77\": container with ID starting with 7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77 not found: ID does not exist" containerID="7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.423441 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77"} err="failed to get container status \"7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77\": rpc error: code = NotFound desc = could not find container \"7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77\": container with ID starting with 7bf87a513a7d468d4932e859baa2d050ca3b50371cf170e8fd3e8def5aacbd77 not found: ID does not exist" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.772920 4689 util.go:48] "No ready sandbox for pod can be found. 
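
[annotation] The paired "ContainerStatus from runtime service failed" / "DeleteContainer returned error" entries above are benign: the dnsmasq-dns containers had already been removed, so the follow-up status lookup returns gRPC NotFound and cleanup moves on. A minimal sketch of that idempotent-delete pattern (illustrative only, not kubelet's actual code; removeFn stands in for a CRI RemoveContainer call):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIdempotent treats a NotFound error from the runtime as success:
// the container is already gone, so there is nothing left to delete.
func removeIdempotent(id string, removeFn func(string) error) error {
	err := removeFn(id)
	if err == nil {
		return nil
	}
	if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
		return nil // already removed; log and continue, as the kubelet does above
	}
	return fmt.Errorf("remove container %s: %w", id, err)
}

func main() {
	alreadyGone := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	fmt.Println(removeIdempotent("0bc3d087", alreadyGone)) // prints <nil>
}
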
Need to start a new one" pod="openstack/placement-9cd0-account-create-kjc4q" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.780705 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-004c-account-create-xpm4r" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.887116 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72978894-08d4-40e7-ab23-9d7325ced36d" path="/var/lib/kubelet/pods/72978894-08d4-40e7-ab23-9d7325ced36d/volumes" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.932831 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gg2m\" (UniqueName: \"kubernetes.io/projected/b828e6e8-5980-4eb9-a91d-8a0c503c5432-kube-api-access-2gg2m\") pod \"b828e6e8-5980-4eb9-a91d-8a0c503c5432\" (UID: \"b828e6e8-5980-4eb9-a91d-8a0c503c5432\") " Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.932991 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcr5j\" (UniqueName: \"kubernetes.io/projected/8ad8ad88-3a13-433c-b063-3303125e8ee6-kube-api-access-dcr5j\") pod \"8ad8ad88-3a13-433c-b063-3303125e8ee6\" (UID: \"8ad8ad88-3a13-433c-b063-3303125e8ee6\") " Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.937106 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ad8ad88-3a13-433c-b063-3303125e8ee6-kube-api-access-dcr5j" (OuterVolumeSpecName: "kube-api-access-dcr5j") pod "8ad8ad88-3a13-433c-b063-3303125e8ee6" (UID: "8ad8ad88-3a13-433c-b063-3303125e8ee6"). InnerVolumeSpecName "kube-api-access-dcr5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:13 crc kubenswrapper[4689]: I1013 21:26:13.937367 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b828e6e8-5980-4eb9-a91d-8a0c503c5432-kube-api-access-2gg2m" (OuterVolumeSpecName: "kube-api-access-2gg2m") pod "b828e6e8-5980-4eb9-a91d-8a0c503c5432" (UID: "b828e6e8-5980-4eb9-a91d-8a0c503c5432"). InnerVolumeSpecName "kube-api-access-2gg2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.035523 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gg2m\" (UniqueName: \"kubernetes.io/projected/b828e6e8-5980-4eb9-a91d-8a0c503c5432-kube-api-access-2gg2m\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.035571 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcr5j\" (UniqueName: \"kubernetes.io/projected/8ad8ad88-3a13-433c-b063-3303125e8ee6-kube-api-access-dcr5j\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.344426 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-004c-account-create-xpm4r" event={"ID":"8ad8ad88-3a13-433c-b063-3303125e8ee6","Type":"ContainerDied","Data":"8f94ed745301830102afc659f2462a2088c1d9ade7aa136a8aa8e9d916aa16a7"} Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.344471 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f94ed745301830102afc659f2462a2088c1d9ade7aa136a8aa8e9d916aa16a7" Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.344521 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-004c-account-create-xpm4r" Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.348290 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9cd0-account-create-kjc4q" event={"ID":"b828e6e8-5980-4eb9-a91d-8a0c503c5432","Type":"ContainerDied","Data":"94083701bd1bfb826f2bee98c20bb06bb0729578855c559f482c9e12e15c1e6f"} Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.348432 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94083701bd1bfb826f2bee98c20bb06bb0729578855c559f482c9e12e15c1e6f" Oct 13 21:26:14 crc kubenswrapper[4689]: I1013 21:26:14.348340 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9cd0-account-create-kjc4q" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.266515 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-b474-account-create-hq4kd"] Oct 13 21:26:15 crc kubenswrapper[4689]: E1013 21:26:15.267352 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ad8ad88-3a13-433c-b063-3303125e8ee6" containerName="mariadb-account-create" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.267368 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ad8ad88-3a13-433c-b063-3303125e8ee6" containerName="mariadb-account-create" Oct 13 21:26:15 crc kubenswrapper[4689]: E1013 21:26:15.267387 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72978894-08d4-40e7-ab23-9d7325ced36d" containerName="dnsmasq-dns" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.267396 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="72978894-08d4-40e7-ab23-9d7325ced36d" containerName="dnsmasq-dns" Oct 13 21:26:15 crc kubenswrapper[4689]: E1013 21:26:15.267417 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b828e6e8-5980-4eb9-a91d-8a0c503c5432" containerName="mariadb-account-create" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.267426 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b828e6e8-5980-4eb9-a91d-8a0c503c5432" containerName="mariadb-account-create" Oct 13 21:26:15 crc kubenswrapper[4689]: E1013 21:26:15.267447 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72978894-08d4-40e7-ab23-9d7325ced36d" containerName="init" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.267456 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="72978894-08d4-40e7-ab23-9d7325ced36d" containerName="init" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.268603 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ad8ad88-3a13-433c-b063-3303125e8ee6" containerName="mariadb-account-create" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.268638 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="b828e6e8-5980-4eb9-a91d-8a0c503c5432" containerName="mariadb-account-create" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.268659 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="72978894-08d4-40e7-ab23-9d7325ced36d" containerName="dnsmasq-dns" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.269481 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-b474-account-create-hq4kd" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.273419 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.284908 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-b474-account-create-hq4kd"] Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.376932 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdvsq\" (UniqueName: \"kubernetes.io/projected/4659f4b9-dd90-4615-bff5-a75920d17db4-kube-api-access-bdvsq\") pod \"glance-b474-account-create-hq4kd\" (UID: \"4659f4b9-dd90-4615-bff5-a75920d17db4\") " pod="openstack/glance-b474-account-create-hq4kd" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.478830 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdvsq\" (UniqueName: \"kubernetes.io/projected/4659f4b9-dd90-4615-bff5-a75920d17db4-kube-api-access-bdvsq\") pod \"glance-b474-account-create-hq4kd\" (UID: \"4659f4b9-dd90-4615-bff5-a75920d17db4\") " pod="openstack/glance-b474-account-create-hq4kd" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.508126 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdvsq\" (UniqueName: \"kubernetes.io/projected/4659f4b9-dd90-4615-bff5-a75920d17db4-kube-api-access-bdvsq\") pod \"glance-b474-account-create-hq4kd\" (UID: \"4659f4b9-dd90-4615-bff5-a75920d17db4\") " pod="openstack/glance-b474-account-create-hq4kd" Oct 13 21:26:15 crc kubenswrapper[4689]: I1013 21:26:15.598730 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-b474-account-create-hq4kd" Oct 13 21:26:16 crc kubenswrapper[4689]: I1013 21:26:16.020089 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-b474-account-create-hq4kd"] Oct 13 21:26:16 crc kubenswrapper[4689]: I1013 21:26:16.362548 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b474-account-create-hq4kd" event={"ID":"4659f4b9-dd90-4615-bff5-a75920d17db4","Type":"ContainerStarted","Data":"a137c0a5c9fe0857430dcd09d36cc33788a93122c88e028dff2689c0ae8023f1"} Oct 13 21:26:18 crc kubenswrapper[4689]: I1013 21:26:18.392364 4689 generic.go:334] "Generic (PLEG): container finished" podID="4659f4b9-dd90-4615-bff5-a75920d17db4" containerID="df2dfdff0e30fac2112a5f8fb8e1ecb215f91f7f6d7a11e4134d62cf6fb93b2e" exitCode=0 Oct 13 21:26:18 crc kubenswrapper[4689]: I1013 21:26:18.392418 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b474-account-create-hq4kd" event={"ID":"4659f4b9-dd90-4615-bff5-a75920d17db4","Type":"ContainerDied","Data":"df2dfdff0e30fac2112a5f8fb8e1ecb215f91f7f6d7a11e4134d62cf6fb93b2e"} Oct 13 21:26:18 crc kubenswrapper[4689]: I1013 21:26:18.841335 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:18 crc kubenswrapper[4689]: E1013 21:26:18.841640 4689 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 13 21:26:18 crc kubenswrapper[4689]: E1013 21:26:18.841793 4689 projected.go:194] Error preparing data for projected volume 
Oct 13 21:26:19 crc kubenswrapper[4689]: I1013 21:26:19.404095 4689 generic.go:334] "Generic (PLEG): container finished" podID="0817e909-9d71-4ddd-b3e7-49e41383b1da" containerID="c65ccfa1154fb96274e13a06c6d0afd518ca0401847b2c52c576b7beac0c2ce3" exitCode=0
Oct 13 21:26:19 crc kubenswrapper[4689]: I1013 21:26:19.404215 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-8rq2n" event={"ID":"0817e909-9d71-4ddd-b3e7-49e41383b1da","Type":"ContainerDied","Data":"c65ccfa1154fb96274e13a06c6d0afd518ca0401847b2c52c576b7beac0c2ce3"}
Oct 13 21:26:19 crc kubenswrapper[4689]: I1013 21:26:19.734620 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-b474-account-create-hq4kd"
Oct 13 21:26:19 crc kubenswrapper[4689]: I1013 21:26:19.865378 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdvsq\" (UniqueName: \"kubernetes.io/projected/4659f4b9-dd90-4615-bff5-a75920d17db4-kube-api-access-bdvsq\") pod \"4659f4b9-dd90-4615-bff5-a75920d17db4\" (UID: \"4659f4b9-dd90-4615-bff5-a75920d17db4\") "
Oct 13 21:26:19 crc kubenswrapper[4689]: I1013 21:26:19.879089 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4659f4b9-dd90-4615-bff5-a75920d17db4-kube-api-access-bdvsq" (OuterVolumeSpecName: "kube-api-access-bdvsq") pod "4659f4b9-dd90-4615-bff5-a75920d17db4" (UID: "4659f4b9-dd90-4615-bff5-a75920d17db4"). InnerVolumeSpecName "kube-api-access-bdvsq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:26:19 crc kubenswrapper[4689]: I1013 21:26:19.967567 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdvsq\" (UniqueName: \"kubernetes.io/projected/4659f4b9-dd90-4615-bff5-a75920d17db4-kube-api-access-bdvsq\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.336082 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8t9jt" podUID="596fffc8-5b10-4da9-950c-ac58fafd2eb2" containerName="ovn-controller" probeResult="failure" output=<
Oct 13 21:26:20 crc kubenswrapper[4689]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Oct 13 21:26:20 crc kubenswrapper[4689]: >
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.415089 4689 generic.go:334] "Generic (PLEG): container finished" podID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerID="00004912b447434369b82694f633ce610c7441a9cb15a89d73edb1d7eb308492" exitCode=0
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.415185 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b974f9f4-057e-4a9c-9835-a9636d5601f8","Type":"ContainerDied","Data":"00004912b447434369b82694f633ce610c7441a9cb15a89d73edb1d7eb308492"}
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.418789 4689 generic.go:334] "Generic (PLEG): container finished" podID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerID="6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762" exitCode=0
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.418860 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4fa622b7-d774-4b55-a3e7-2053625177ca","Type":"ContainerDied","Data":"6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762"}
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.421747 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-b474-account-create-hq4kd"
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.421748 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-b474-account-create-hq4kd" event={"ID":"4659f4b9-dd90-4615-bff5-a75920d17db4","Type":"ContainerDied","Data":"a137c0a5c9fe0857430dcd09d36cc33788a93122c88e028dff2689c0ae8023f1"}
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.421820 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a137c0a5c9fe0857430dcd09d36cc33788a93122c88e028dff2689c0ae8023f1"
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.421907 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-6fdqj"
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.746854 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-8rq2n"
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.881816 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-ring-data-devices\") pod \"0817e909-9d71-4ddd-b3e7-49e41383b1da\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") "
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.882208 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-scripts\") pod \"0817e909-9d71-4ddd-b3e7-49e41383b1da\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") "
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.882246 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-combined-ca-bundle\") pod \"0817e909-9d71-4ddd-b3e7-49e41383b1da\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") "
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.882298 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g2kn\" (UniqueName: \"kubernetes.io/projected/0817e909-9d71-4ddd-b3e7-49e41383b1da-kube-api-access-2g2kn\") pod \"0817e909-9d71-4ddd-b3e7-49e41383b1da\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") "
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.882383 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-dispersionconf\") pod \"0817e909-9d71-4ddd-b3e7-49e41383b1da\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") "
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.882405 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0817e909-9d71-4ddd-b3e7-49e41383b1da-etc-swift\") pod \"0817e909-9d71-4ddd-b3e7-49e41383b1da\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") "
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.882433 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-swiftconf\") pod \"0817e909-9d71-4ddd-b3e7-49e41383b1da\" (UID: \"0817e909-9d71-4ddd-b3e7-49e41383b1da\") "
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.882572 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0817e909-9d71-4ddd-b3e7-49e41383b1da" (UID: "0817e909-9d71-4ddd-b3e7-49e41383b1da"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.883105 4689 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-ring-data-devices\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.883917 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0817e909-9d71-4ddd-b3e7-49e41383b1da-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0817e909-9d71-4ddd-b3e7-49e41383b1da" (UID: "0817e909-9d71-4ddd-b3e7-49e41383b1da"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.889215 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0817e909-9d71-4ddd-b3e7-49e41383b1da-kube-api-access-2g2kn" (OuterVolumeSpecName: "kube-api-access-2g2kn") pod "0817e909-9d71-4ddd-b3e7-49e41383b1da" (UID: "0817e909-9d71-4ddd-b3e7-49e41383b1da"). InnerVolumeSpecName "kube-api-access-2g2kn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.891447 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0817e909-9d71-4ddd-b3e7-49e41383b1da" (UID: "0817e909-9d71-4ddd-b3e7-49e41383b1da"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.906954 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0817e909-9d71-4ddd-b3e7-49e41383b1da" (UID: "0817e909-9d71-4ddd-b3e7-49e41383b1da"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.914343 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0817e909-9d71-4ddd-b3e7-49e41383b1da" (UID: "0817e909-9d71-4ddd-b3e7-49e41383b1da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.924972 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-scripts" (OuterVolumeSpecName: "scripts") pod "0817e909-9d71-4ddd-b3e7-49e41383b1da" (UID: "0817e909-9d71-4ddd-b3e7-49e41383b1da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.984844 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g2kn\" (UniqueName: \"kubernetes.io/projected/0817e909-9d71-4ddd-b3e7-49e41383b1da-kube-api-access-2g2kn\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.984872 4689 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-dispersionconf\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.984885 4689 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0817e909-9d71-4ddd-b3e7-49e41383b1da-etc-swift\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.984911 4689 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-swiftconf\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.984920 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0817e909-9d71-4ddd-b3e7-49e41383b1da-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:20 crc kubenswrapper[4689]: I1013 21:26:20.984929 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0817e909-9d71-4ddd-b3e7-49e41383b1da-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.434728 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4fa622b7-d774-4b55-a3e7-2053625177ca","Type":"ContainerStarted","Data":"5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a"}
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.436624 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-8rq2n"
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.437337 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-8rq2n" event={"ID":"0817e909-9d71-4ddd-b3e7-49e41383b1da","Type":"ContainerDied","Data":"53676dd50d14d76cd4e024ad405702654a883141182a163ba1961adde74e39a4"}
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.437742 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53676dd50d14d76cd4e024ad405702654a883141182a163ba1961adde74e39a4"
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.437999 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.438937 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b974f9f4-057e-4a9c-9835-a9636d5601f8","Type":"ContainerStarted","Data":"e181ed88165040f3f0ece477fd380d3b5203e300d2172dfa5d1a168538ec6a3f"}
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.439170 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.466999 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=48.040356663 podStartE2EDuration="56.466975795s" podCreationTimestamp="2025-10-13 21:25:25 +0000 UTC" firstStartedPulling="2025-10-13 21:25:37.862717462 +0000 UTC m=+854.780962547" lastFinishedPulling="2025-10-13 21:25:46.289336594 +0000 UTC m=+863.207581679" observedRunningTime="2025-10-13 21:26:21.458950576 +0000 UTC m=+898.377195711" watchObservedRunningTime="2025-10-13 21:26:21.466975795 +0000 UTC m=+898.385220880"
Oct 13 21:26:21 crc kubenswrapper[4689]: I1013 21:26:21.493007 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=47.721093566 podStartE2EDuration="56.492983318s" podCreationTimestamp="2025-10-13 21:25:25 +0000 UTC" firstStartedPulling="2025-10-13 21:25:37.82531501 +0000 UTC m=+854.743560095" lastFinishedPulling="2025-10-13 21:25:46.597204742 +0000 UTC m=+863.515449847" observedRunningTime="2025-10-13 21:26:21.484038277 +0000 UTC m=+898.402283362" watchObservedRunningTime="2025-10-13 21:26:21.492983318 +0000 UTC m=+898.411228403"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.347142 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-8t9jt" podUID="596fffc8-5b10-4da9-950c-ac58fafd2eb2" containerName="ovn-controller" probeResult="failure" output=<
Oct 13 21:26:25 crc kubenswrapper[4689]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Oct 13 21:26:25 crc kubenswrapper[4689]: >
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.436719 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-hm6gs"]
Oct 13 21:26:25 crc kubenswrapper[4689]: E1013 21:26:25.437064 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4659f4b9-dd90-4615-bff5-a75920d17db4" containerName="mariadb-account-create"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.437079 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4659f4b9-dd90-4615-bff5-a75920d17db4" containerName="mariadb-account-create"
Oct 13 21:26:25 crc kubenswrapper[4689]: E1013 21:26:25.437097 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0817e909-9d71-4ddd-b3e7-49e41383b1da" containerName="swift-ring-rebalance"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.437103 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0817e909-9d71-4ddd-b3e7-49e41383b1da" containerName="swift-ring-rebalance"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.437249 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="4659f4b9-dd90-4615-bff5-a75920d17db4" containerName="mariadb-account-create"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.437269 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="0817e909-9d71-4ddd-b3e7-49e41383b1da" containerName="swift-ring-rebalance"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.437788 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.440287 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9jnhx"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.440990 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.450646 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hm6gs"]
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.459041 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-6fdqj"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.469809 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-combined-ca-bundle\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.469883 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-config-data\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.469936 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzrb9\" (UniqueName: \"kubernetes.io/projected/ea77789b-b65d-4659-9169-ef628cda1bc1-kube-api-access-fzrb9\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.470172 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-db-sync-config-data\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.572155 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-combined-ca-bundle\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.572251 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-config-data\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.572286 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzrb9\" (UniqueName: \"kubernetes.io/projected/ea77789b-b65d-4659-9169-ef628cda1bc1-kube-api-access-fzrb9\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.572377 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-db-sync-config-data\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.580115 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-config-data\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.580387 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-db-sync-config-data\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.586557 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-combined-ca-bundle\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.591037 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzrb9\" (UniqueName: \"kubernetes.io/projected/ea77789b-b65d-4659-9169-ef628cda1bc1-kube-api-access-fzrb9\") pod \"glance-db-sync-hm6gs\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.667384 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8t9jt-config-zp68k"]
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.668820 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.671469 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.692433 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8t9jt-config-zp68k"]
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.759764 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hm6gs"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.778224 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-additional-scripts\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.778307 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run-ovn\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.778366 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-log-ovn\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.778402 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnhsv\" (UniqueName: \"kubernetes.io/projected/9dc013b1-3197-407f-aa39-ec993cf4e78b-kube-api-access-pnhsv\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.778584 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-scripts\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.778870 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.882672 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.882774 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-additional-scripts\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k"
Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.882809 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName:
\"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run-ovn\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.882845 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-log-ovn\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.882876 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnhsv\" (UniqueName: \"kubernetes.io/projected/9dc013b1-3197-407f-aa39-ec993cf4e78b-kube-api-access-pnhsv\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.882913 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-scripts\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.884107 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run-ovn\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.884245 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.885513 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-additional-scripts\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.886863 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-log-ovn\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.888504 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-scripts\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.929656 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnhsv\" (UniqueName: 
\"kubernetes.io/projected/9dc013b1-3197-407f-aa39-ec993cf4e78b-kube-api-access-pnhsv\") pod \"ovn-controller-8t9jt-config-zp68k\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:25 crc kubenswrapper[4689]: I1013 21:26:25.988043 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:26 crc kubenswrapper[4689]: I1013 21:26:26.354447 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hm6gs"] Oct 13 21:26:26 crc kubenswrapper[4689]: I1013 21:26:26.470286 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8t9jt-config-zp68k"] Oct 13 21:26:26 crc kubenswrapper[4689]: W1013 21:26:26.472382 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dc013b1_3197_407f_aa39_ec993cf4e78b.slice/crio-6c463fa527103c0e758a6be5ba7e88a480adbf407a6a5c24b437f0f51d51dc64 WatchSource:0}: Error finding container 6c463fa527103c0e758a6be5ba7e88a480adbf407a6a5c24b437f0f51d51dc64: Status 404 returned error can't find the container with id 6c463fa527103c0e758a6be5ba7e88a480adbf407a6a5c24b437f0f51d51dc64 Oct 13 21:26:26 crc kubenswrapper[4689]: I1013 21:26:26.482230 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hm6gs" event={"ID":"ea77789b-b65d-4659-9169-ef628cda1bc1","Type":"ContainerStarted","Data":"621c0c0d4d465a67cd717c16e3634a9ec20592a26b477b3d5df102de9646476f"} Oct 13 21:26:26 crc kubenswrapper[4689]: I1013 21:26:26.483502 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt-config-zp68k" event={"ID":"9dc013b1-3197-407f-aa39-ec993cf4e78b","Type":"ContainerStarted","Data":"6c463fa527103c0e758a6be5ba7e88a480adbf407a6a5c24b437f0f51d51dc64"} Oct 13 21:26:27 crc kubenswrapper[4689]: I1013 21:26:27.495624 4689 generic.go:334] "Generic (PLEG): container finished" podID="9dc013b1-3197-407f-aa39-ec993cf4e78b" containerID="c2411c36b63d5bc86fd3c7da8260f1554e7750d582b6616ac3583bcf8a37d0e6" exitCode=0 Oct 13 21:26:27 crc kubenswrapper[4689]: I1013 21:26:27.495949 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt-config-zp68k" event={"ID":"9dc013b1-3197-407f-aa39-ec993cf4e78b","Type":"ContainerDied","Data":"c2411c36b63d5bc86fd3c7da8260f1554e7750d582b6616ac3583bcf8a37d0e6"} Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.849128 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.942520 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnhsv\" (UniqueName: \"kubernetes.io/projected/9dc013b1-3197-407f-aa39-ec993cf4e78b-kube-api-access-pnhsv\") pod \"9dc013b1-3197-407f-aa39-ec993cf4e78b\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.942625 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-log-ovn\") pod \"9dc013b1-3197-407f-aa39-ec993cf4e78b\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.942717 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run-ovn\") pod \"9dc013b1-3197-407f-aa39-ec993cf4e78b\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.943226 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9dc013b1-3197-407f-aa39-ec993cf4e78b" (UID: "9dc013b1-3197-407f-aa39-ec993cf4e78b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.943340 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9dc013b1-3197-407f-aa39-ec993cf4e78b" (UID: "9dc013b1-3197-407f-aa39-ec993cf4e78b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.943888 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-scripts\") pod \"9dc013b1-3197-407f-aa39-ec993cf4e78b\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.944114 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-additional-scripts\") pod \"9dc013b1-3197-407f-aa39-ec993cf4e78b\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.944194 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run\") pod \"9dc013b1-3197-407f-aa39-ec993cf4e78b\" (UID: \"9dc013b1-3197-407f-aa39-ec993cf4e78b\") " Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.945022 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9dc013b1-3197-407f-aa39-ec993cf4e78b" (UID: "9dc013b1-3197-407f-aa39-ec993cf4e78b"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.945055 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run" (OuterVolumeSpecName: "var-run") pod "9dc013b1-3197-407f-aa39-ec993cf4e78b" (UID: "9dc013b1-3197-407f-aa39-ec993cf4e78b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.946075 4689 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.946248 4689 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.946279 4689 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.946293 4689 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9dc013b1-3197-407f-aa39-ec993cf4e78b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.947180 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-scripts" (OuterVolumeSpecName: "scripts") pod "9dc013b1-3197-407f-aa39-ec993cf4e78b" (UID: "9dc013b1-3197-407f-aa39-ec993cf4e78b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:28 crc kubenswrapper[4689]: I1013 21:26:28.956752 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc013b1-3197-407f-aa39-ec993cf4e78b-kube-api-access-pnhsv" (OuterVolumeSpecName: "kube-api-access-pnhsv") pod "9dc013b1-3197-407f-aa39-ec993cf4e78b" (UID: "9dc013b1-3197-407f-aa39-ec993cf4e78b"). InnerVolumeSpecName "kube-api-access-pnhsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:29 crc kubenswrapper[4689]: I1013 21:26:29.047674 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnhsv\" (UniqueName: \"kubernetes.io/projected/9dc013b1-3197-407f-aa39-ec993cf4e78b-kube-api-access-pnhsv\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:29 crc kubenswrapper[4689]: I1013 21:26:29.047711 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc013b1-3197-407f-aa39-ec993cf4e78b-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:29 crc kubenswrapper[4689]: I1013 21:26:29.512637 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-zp68k" Oct 13 21:26:29 crc kubenswrapper[4689]: I1013 21:26:29.512643 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt-config-zp68k" event={"ID":"9dc013b1-3197-407f-aa39-ec993cf4e78b","Type":"ContainerDied","Data":"6c463fa527103c0e758a6be5ba7e88a480adbf407a6a5c24b437f0f51d51dc64"} Oct 13 21:26:29 crc kubenswrapper[4689]: I1013 21:26:29.513263 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c463fa527103c0e758a6be5ba7e88a480adbf407a6a5c24b437f0f51d51dc64" Oct 13 21:26:29 crc kubenswrapper[4689]: I1013 21:26:29.939354 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8t9jt-config-zp68k"] Oct 13 21:26:29 crc kubenswrapper[4689]: I1013 21:26:29.946720 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8t9jt-config-zp68k"] Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.060083 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8t9jt-config-5fprt"] Oct 13 21:26:30 crc kubenswrapper[4689]: E1013 21:26:30.060414 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dc013b1-3197-407f-aa39-ec993cf4e78b" containerName="ovn-config" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.060430 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dc013b1-3197-407f-aa39-ec993cf4e78b" containerName="ovn-config" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.060571 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dc013b1-3197-407f-aa39-ec993cf4e78b" containerName="ovn-config" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.061078 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.064079 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.068976 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8t9jt-config-5fprt"] Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.168563 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-scripts\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.176753 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqzgx\" (UniqueName: \"kubernetes.io/projected/8d868715-fd50-4fdc-a8c9-77d82739beb6-kube-api-access-cqzgx\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.177131 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.177335 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-additional-scripts\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.177526 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-log-ovn\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.177692 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run-ovn\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.278901 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-additional-scripts\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.278986 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-log-ovn\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.279025 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run-ovn\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.279058 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-scripts\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.279082 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqzgx\" (UniqueName: \"kubernetes.io/projected/8d868715-fd50-4fdc-a8c9-77d82739beb6-kube-api-access-cqzgx\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.279107 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.279473 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.279710 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run-ovn\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.279863 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-log-ovn\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.280203 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-additional-scripts\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.281842 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-scripts\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.306675 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqzgx\" (UniqueName: \"kubernetes.io/projected/8d868715-fd50-4fdc-a8c9-77d82739beb6-kube-api-access-cqzgx\") pod \"ovn-controller-8t9jt-config-5fprt\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.371527 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-8t9jt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.434606 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:30 crc kubenswrapper[4689]: I1013 21:26:30.876110 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8t9jt-config-5fprt"] Oct 13 21:26:31 crc kubenswrapper[4689]: I1013 21:26:31.535026 4689 generic.go:334] "Generic (PLEG): container finished" podID="8d868715-fd50-4fdc-a8c9-77d82739beb6" containerID="56c9c5bb6ca28063d276ff3a301de8321837344a9e159354626434507914970b" exitCode=0 Oct 13 21:26:31 crc kubenswrapper[4689]: I1013 21:26:31.535078 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt-config-5fprt" event={"ID":"8d868715-fd50-4fdc-a8c9-77d82739beb6","Type":"ContainerDied","Data":"56c9c5bb6ca28063d276ff3a301de8321837344a9e159354626434507914970b"} Oct 13 21:26:31 crc kubenswrapper[4689]: I1013 21:26:31.535106 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt-config-5fprt" event={"ID":"8d868715-fd50-4fdc-a8c9-77d82739beb6","Type":"ContainerStarted","Data":"8d73455266286d299f39c3fdbbe24da288d78de9ed0b24e8f587343aec44c92a"} Oct 13 21:26:31 crc kubenswrapper[4689]: I1013 21:26:31.877454 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dc013b1-3197-407f-aa39-ec993cf4e78b" path="/var/lib/kubelet/pods/9dc013b1-3197-407f-aa39-ec993cf4e78b/volumes" Oct 13 21:26:34 crc kubenswrapper[4689]: I1013 21:26:34.863324 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:34 crc kubenswrapper[4689]: I1013 21:26:34.872172 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5-etc-swift\") pod \"swift-storage-0\" (UID: \"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5\") " pod="openstack/swift-storage-0" Oct 13 21:26:35 crc kubenswrapper[4689]: I1013 21:26:35.050890 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.453099 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.711186 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-nlsl4"] Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.712249 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nlsl4" Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.719551 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nlsl4"] Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.746813 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.838772 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-j7vd8"] Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.839883 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-j7vd8" Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.858895 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-j7vd8"] Oct 13 21:26:36 crc kubenswrapper[4689]: I1013 21:26:36.905025 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdzh5\" (UniqueName: \"kubernetes.io/projected/5bfcb233-910d-4200-a46c-f6d350f0e252-kube-api-access-rdzh5\") pod \"barbican-db-create-nlsl4\" (UID: \"5bfcb233-910d-4200-a46c-f6d350f0e252\") " pod="openstack/barbican-db-create-nlsl4" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.006259 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdzh5\" (UniqueName: \"kubernetes.io/projected/5bfcb233-910d-4200-a46c-f6d350f0e252-kube-api-access-rdzh5\") pod \"barbican-db-create-nlsl4\" (UID: \"5bfcb233-910d-4200-a46c-f6d350f0e252\") " pod="openstack/barbican-db-create-nlsl4" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.006414 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzgps\" (UniqueName: \"kubernetes.io/projected/eca580fb-8c72-4c1d-a917-e1e6a614e3cb-kube-api-access-lzgps\") pod \"cinder-db-create-j7vd8\" (UID: \"eca580fb-8c72-4c1d-a917-e1e6a614e3cb\") " pod="openstack/cinder-db-create-j7vd8" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.021521 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-8djwn"] Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.022906 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-8djwn" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.035862 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdzh5\" (UniqueName: \"kubernetes.io/projected/5bfcb233-910d-4200-a46c-f6d350f0e252-kube-api-access-rdzh5\") pod \"barbican-db-create-nlsl4\" (UID: \"5bfcb233-910d-4200-a46c-f6d350f0e252\") " pod="openstack/barbican-db-create-nlsl4" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.038737 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8djwn"] Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.107660 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzgps\" (UniqueName: \"kubernetes.io/projected/eca580fb-8c72-4c1d-a917-e1e6a614e3cb-kube-api-access-lzgps\") pod \"cinder-db-create-j7vd8\" (UID: \"eca580fb-8c72-4c1d-a917-e1e6a614e3cb\") " pod="openstack/cinder-db-create-j7vd8" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.107731 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8gsd\" (UniqueName: \"kubernetes.io/projected/131d3017-57bd-4d6c-982c-7941d55822b9-kube-api-access-n8gsd\") pod \"neutron-db-create-8djwn\" (UID: \"131d3017-57bd-4d6c-982c-7941d55822b9\") " pod="openstack/neutron-db-create-8djwn" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.129899 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzgps\" (UniqueName: \"kubernetes.io/projected/eca580fb-8c72-4c1d-a917-e1e6a614e3cb-kube-api-access-lzgps\") pod \"cinder-db-create-j7vd8\" (UID: \"eca580fb-8c72-4c1d-a917-e1e6a614e3cb\") " pod="openstack/cinder-db-create-j7vd8" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.160696 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-j7vd8" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.210019 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8gsd\" (UniqueName: \"kubernetes.io/projected/131d3017-57bd-4d6c-982c-7941d55822b9-kube-api-access-n8gsd\") pod \"neutron-db-create-8djwn\" (UID: \"131d3017-57bd-4d6c-982c-7941d55822b9\") " pod="openstack/neutron-db-create-8djwn" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.232734 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8gsd\" (UniqueName: \"kubernetes.io/projected/131d3017-57bd-4d6c-982c-7941d55822b9-kube-api-access-n8gsd\") pod \"neutron-db-create-8djwn\" (UID: \"131d3017-57bd-4d6c-982c-7941d55822b9\") " pod="openstack/neutron-db-create-8djwn" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.257416 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-2vmxt"] Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.258466 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.260955 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.261036 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.263680 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dqmvt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.263779 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.267551 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2vmxt"] Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.334522 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nlsl4" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.380656 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8djwn" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.415244 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbz8k\" (UniqueName: \"kubernetes.io/projected/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-kube-api-access-pbz8k\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.415301 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-config-data\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.415332 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-combined-ca-bundle\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.517438 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbz8k\" (UniqueName: \"kubernetes.io/projected/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-kube-api-access-pbz8k\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.517505 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-config-data\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.517540 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-combined-ca-bundle\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " 
pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.521641 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-combined-ca-bundle\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.521805 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-config-data\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.537380 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbz8k\" (UniqueName: \"kubernetes.io/projected/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-kube-api-access-pbz8k\") pod \"keystone-db-sync-2vmxt\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") " pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.627387 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.677401 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-5fprt" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.823923 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-additional-scripts\") pod \"8d868715-fd50-4fdc-a8c9-77d82739beb6\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824224 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-scripts\") pod \"8d868715-fd50-4fdc-a8c9-77d82739beb6\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824271 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-log-ovn\") pod \"8d868715-fd50-4fdc-a8c9-77d82739beb6\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824310 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run-ovn\") pod \"8d868715-fd50-4fdc-a8c9-77d82739beb6\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824370 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqzgx\" (UniqueName: \"kubernetes.io/projected/8d868715-fd50-4fdc-a8c9-77d82739beb6-kube-api-access-cqzgx\") pod \"8d868715-fd50-4fdc-a8c9-77d82739beb6\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824424 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run\") pod 
\"8d868715-fd50-4fdc-a8c9-77d82739beb6\" (UID: \"8d868715-fd50-4fdc-a8c9-77d82739beb6\") " Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824840 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run" (OuterVolumeSpecName: "var-run") pod "8d868715-fd50-4fdc-a8c9-77d82739beb6" (UID: "8d868715-fd50-4fdc-a8c9-77d82739beb6"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824904 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "8d868715-fd50-4fdc-a8c9-77d82739beb6" (UID: "8d868715-fd50-4fdc-a8c9-77d82739beb6"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824906 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8d868715-fd50-4fdc-a8c9-77d82739beb6" (UID: "8d868715-fd50-4fdc-a8c9-77d82739beb6"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.824924 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8d868715-fd50-4fdc-a8c9-77d82739beb6" (UID: "8d868715-fd50-4fdc-a8c9-77d82739beb6"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.827237 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-scripts" (OuterVolumeSpecName: "scripts") pod "8d868715-fd50-4fdc-a8c9-77d82739beb6" (UID: "8d868715-fd50-4fdc-a8c9-77d82739beb6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.833892 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d868715-fd50-4fdc-a8c9-77d82739beb6-kube-api-access-cqzgx" (OuterVolumeSpecName: "kube-api-access-cqzgx") pod "8d868715-fd50-4fdc-a8c9-77d82739beb6" (UID: "8d868715-fd50-4fdc-a8c9-77d82739beb6"). InnerVolumeSpecName "kube-api-access-cqzgx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.926566 4689 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.927195 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d868715-fd50-4fdc-a8c9-77d82739beb6-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.927208 4689 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.927221 4689 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.927236 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqzgx\" (UniqueName: \"kubernetes.io/projected/8d868715-fd50-4fdc-a8c9-77d82739beb6-kube-api-access-cqzgx\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:37 crc kubenswrapper[4689]: I1013 21:26:37.927248 4689 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8d868715-fd50-4fdc-a8c9-77d82739beb6-var-run\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.195980 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-j7vd8"] Oct 13 21:26:38 crc kubenswrapper[4689]: W1013 21:26:38.223202 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeca580fb_8c72_4c1d_a917_e1e6a614e3cb.slice/crio-4ebd8cf0c802ebb9da7d4b218cb0fc145106a6e6ba73bce357938a38b92acbb5 WatchSource:0}: Error finding container 4ebd8cf0c802ebb9da7d4b218cb0fc145106a6e6ba73bce357938a38b92acbb5: Status 404 returned error can't find the container with id 4ebd8cf0c802ebb9da7d4b218cb0fc145106a6e6ba73bce357938a38b92acbb5 Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.295675 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8djwn"] Oct 13 21:26:38 crc kubenswrapper[4689]: W1013 21:26:38.305429 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod131d3017_57bd_4d6c_982c_7941d55822b9.slice/crio-ce734ba331974eef85994ff67665e5c17840f6e397ae6d67fece64d3e4beade7 WatchSource:0}: Error finding container ce734ba331974eef85994ff67665e5c17840f6e397ae6d67fece64d3e4beade7: Status 404 returned error can't find the container with id ce734ba331974eef85994ff67665e5c17840f6e397ae6d67fece64d3e4beade7 Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.461093 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nlsl4"] Oct 13 21:26:38 crc kubenswrapper[4689]: W1013 21:26:38.475279 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bfcb233_910d_4200_a46c_f6d350f0e252.slice/crio-a44b54fe68f7cacc7dcf7b4eb361006d9cd5085d987400d4cbc32f052c88f86c WatchSource:0}: Error finding 
container a44b54fe68f7cacc7dcf7b4eb361006d9cd5085d987400d4cbc32f052c88f86c: Status 404 returned error can't find the container with id a44b54fe68f7cacc7dcf7b4eb361006d9cd5085d987400d4cbc32f052c88f86c
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.501716 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2vmxt"]
Oct 13 21:26:38 crc kubenswrapper[4689]: W1013 21:26:38.507495 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca29831b_9c1d_4c38_bf2a_c17e40542a0c.slice/crio-9dfba7112fdb1860ccf32e8ed0a014b6d28a4308afa6128f3095a726ea7149fa WatchSource:0}: Error finding container 9dfba7112fdb1860ccf32e8ed0a014b6d28a4308afa6128f3095a726ea7149fa: Status 404 returned error can't find the container with id 9dfba7112fdb1860ccf32e8ed0a014b6d28a4308afa6128f3095a726ea7149fa
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.534611 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 13 21:26:38 crc kubenswrapper[4689]: W1013 21:26:38.551689 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd67ea903_1ba1_4116_a39d_b2ca0d5d7eb5.slice/crio-0d1663febe41aae77afb1f5247c231e4f6948befa42fda2447b7cce4697080b5 WatchSource:0}: Error finding container 0d1663febe41aae77afb1f5247c231e4f6948befa42fda2447b7cce4697080b5: Status 404 returned error can't find the container with id 0d1663febe41aae77afb1f5247c231e4f6948befa42fda2447b7cce4697080b5
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.599120 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-j7vd8" event={"ID":"eca580fb-8c72-4c1d-a917-e1e6a614e3cb","Type":"ContainerStarted","Data":"74bef32fe2ec32d7dd58994f00cf963830c4677bd17ac6e9c28409bc30fc1d54"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.599186 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-j7vd8" event={"ID":"eca580fb-8c72-4c1d-a917-e1e6a614e3cb","Type":"ContainerStarted","Data":"4ebd8cf0c802ebb9da7d4b218cb0fc145106a6e6ba73bce357938a38b92acbb5"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.603551 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2vmxt" event={"ID":"ca29831b-9c1d-4c38-bf2a-c17e40542a0c","Type":"ContainerStarted","Data":"9dfba7112fdb1860ccf32e8ed0a014b6d28a4308afa6128f3095a726ea7149fa"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.605031 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"0d1663febe41aae77afb1f5247c231e4f6948befa42fda2447b7cce4697080b5"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.606175 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8djwn" event={"ID":"131d3017-57bd-4d6c-982c-7941d55822b9","Type":"ContainerStarted","Data":"78b79c7aecd557d59c367100d13aac29d5d79e76d94c6ee6e53dbc588e134cc9"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.606229 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8djwn" event={"ID":"131d3017-57bd-4d6c-982c-7941d55822b9","Type":"ContainerStarted","Data":"ce734ba331974eef85994ff67665e5c17840f6e397ae6d67fece64d3e4beade7"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.607691 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nlsl4" event={"ID":"5bfcb233-910d-4200-a46c-f6d350f0e252","Type":"ContainerStarted","Data":"a44b54fe68f7cacc7dcf7b4eb361006d9cd5085d987400d4cbc32f052c88f86c"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.609070 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8t9jt-config-5fprt" event={"ID":"8d868715-fd50-4fdc-a8c9-77d82739beb6","Type":"ContainerDied","Data":"8d73455266286d299f39c3fdbbe24da288d78de9ed0b24e8f587343aec44c92a"}
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.609097 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d73455266286d299f39c3fdbbe24da288d78de9ed0b24e8f587343aec44c92a"
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.609142 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8t9jt-config-5fprt"
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.620739 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-j7vd8" podStartSLOduration=2.620720573 podStartE2EDuration="2.620720573s" podCreationTimestamp="2025-10-13 21:26:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:26:38.619197417 +0000 UTC m=+915.537442512" watchObservedRunningTime="2025-10-13 21:26:38.620720573 +0000 UTC m=+915.538965658"
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.644301 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-8djwn" podStartSLOduration=2.644279849 podStartE2EDuration="2.644279849s" podCreationTimestamp="2025-10-13 21:26:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:26:38.638956003 +0000 UTC m=+915.557201088" watchObservedRunningTime="2025-10-13 21:26:38.644279849 +0000 UTC m=+915.562524934"
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.791935 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8t9jt-config-5fprt"]
Oct 13 21:26:38 crc kubenswrapper[4689]: I1013 21:26:38.802768 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8t9jt-config-5fprt"]
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.621513 4689 generic.go:334] "Generic (PLEG): container finished" podID="eca580fb-8c72-4c1d-a917-e1e6a614e3cb" containerID="74bef32fe2ec32d7dd58994f00cf963830c4677bd17ac6e9c28409bc30fc1d54" exitCode=0
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.621630 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-j7vd8" event={"ID":"eca580fb-8c72-4c1d-a917-e1e6a614e3cb","Type":"ContainerDied","Data":"74bef32fe2ec32d7dd58994f00cf963830c4677bd17ac6e9c28409bc30fc1d54"}
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.635503 4689 generic.go:334] "Generic (PLEG): container finished" podID="131d3017-57bd-4d6c-982c-7941d55822b9" containerID="78b79c7aecd557d59c367100d13aac29d5d79e76d94c6ee6e53dbc588e134cc9" exitCode=0
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.635648 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8djwn" event={"ID":"131d3017-57bd-4d6c-982c-7941d55822b9","Type":"ContainerDied","Data":"78b79c7aecd557d59c367100d13aac29d5d79e76d94c6ee6e53dbc588e134cc9"}
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.638663 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hm6gs" event={"ID":"ea77789b-b65d-4659-9169-ef628cda1bc1","Type":"ContainerStarted","Data":"9e23783a9bd750e056498c4c377674ea9ba8f78c712ebe2d94862af2489fb4b6"}
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.643240 4689 generic.go:334] "Generic (PLEG): container finished" podID="5bfcb233-910d-4200-a46c-f6d350f0e252" containerID="5337cca8d97f7f25ac226b6c3a8b8c37c48a87f1b8755bff59f182b952ff5f20" exitCode=0
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.643298 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nlsl4" event={"ID":"5bfcb233-910d-4200-a46c-f6d350f0e252","Type":"ContainerDied","Data":"5337cca8d97f7f25ac226b6c3a8b8c37c48a87f1b8755bff59f182b952ff5f20"}
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.718264 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-hm6gs" podStartSLOduration=3.256819349 podStartE2EDuration="14.718235258s" podCreationTimestamp="2025-10-13 21:26:25 +0000 UTC" firstStartedPulling="2025-10-13 21:26:26.368452409 +0000 UTC m=+903.286697504" lastFinishedPulling="2025-10-13 21:26:37.829868288 +0000 UTC m=+914.748113413" observedRunningTime="2025-10-13 21:26:39.681382989 +0000 UTC m=+916.599628084" watchObservedRunningTime="2025-10-13 21:26:39.718235258 +0000 UTC m=+916.636480333"
Oct 13 21:26:39 crc kubenswrapper[4689]: I1013 21:26:39.898990 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d868715-fd50-4fdc-a8c9-77d82739beb6" path="/var/lib/kubelet/pods/8d868715-fd50-4fdc-a8c9-77d82739beb6/volumes"
Oct 13 21:26:40 crc kubenswrapper[4689]: I1013 21:26:40.657436 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"ab1332ff84b919e004614a00fdbbdccd9a55196453f7eab809a3992fc9a8f6df"}
Oct 13 21:26:40 crc kubenswrapper[4689]: I1013 21:26:40.657779 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"d474c2e0cdc66a8052b2609a3bc78151c153eaabb4f494f04e1f2d0b7b3dc5cb"}
Oct 13 21:26:40 crc kubenswrapper[4689]: I1013 21:26:40.657791 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"2a593e9a7be2334656c18e6fe5f89647ee712828e9477962c7487147b913d84b"}
Oct 13 21:26:41 crc kubenswrapper[4689]: I1013 21:26:41.672828 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"514737eaca59f60154f8637de58cb3582c36af3e60469fbfafa7bd526ce0a33a"}
Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.652464 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8djwn"
Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.687926 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-j7vd8"
Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.698925 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-j7vd8"
Need to start a new one" pod="openstack/cinder-db-create-j7vd8" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.698939 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-j7vd8" event={"ID":"eca580fb-8c72-4c1d-a917-e1e6a614e3cb","Type":"ContainerDied","Data":"4ebd8cf0c802ebb9da7d4b218cb0fc145106a6e6ba73bce357938a38b92acbb5"} Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.699414 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ebd8cf0c802ebb9da7d4b218cb0fc145106a6e6ba73bce357938a38b92acbb5" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.702290 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8djwn" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.702544 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8djwn" event={"ID":"131d3017-57bd-4d6c-982c-7941d55822b9","Type":"ContainerDied","Data":"ce734ba331974eef85994ff67665e5c17840f6e397ae6d67fece64d3e4beade7"} Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.702577 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce734ba331974eef85994ff67665e5c17840f6e397ae6d67fece64d3e4beade7" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.708837 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nlsl4" event={"ID":"5bfcb233-910d-4200-a46c-f6d350f0e252","Type":"ContainerDied","Data":"a44b54fe68f7cacc7dcf7b4eb361006d9cd5085d987400d4cbc32f052c88f86c"} Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.708896 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a44b54fe68f7cacc7dcf7b4eb361006d9cd5085d987400d4cbc32f052c88f86c" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.717512 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nlsl4" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.749183 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8gsd\" (UniqueName: \"kubernetes.io/projected/131d3017-57bd-4d6c-982c-7941d55822b9-kube-api-access-n8gsd\") pod \"131d3017-57bd-4d6c-982c-7941d55822b9\" (UID: \"131d3017-57bd-4d6c-982c-7941d55822b9\") " Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.769746 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/131d3017-57bd-4d6c-982c-7941d55822b9-kube-api-access-n8gsd" (OuterVolumeSpecName: "kube-api-access-n8gsd") pod "131d3017-57bd-4d6c-982c-7941d55822b9" (UID: "131d3017-57bd-4d6c-982c-7941d55822b9"). InnerVolumeSpecName "kube-api-access-n8gsd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.850869 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzgps\" (UniqueName: \"kubernetes.io/projected/eca580fb-8c72-4c1d-a917-e1e6a614e3cb-kube-api-access-lzgps\") pod \"eca580fb-8c72-4c1d-a917-e1e6a614e3cb\" (UID: \"eca580fb-8c72-4c1d-a917-e1e6a614e3cb\") " Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.851038 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdzh5\" (UniqueName: \"kubernetes.io/projected/5bfcb233-910d-4200-a46c-f6d350f0e252-kube-api-access-rdzh5\") pod \"5bfcb233-910d-4200-a46c-f6d350f0e252\" (UID: \"5bfcb233-910d-4200-a46c-f6d350f0e252\") " Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.852075 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8gsd\" (UniqueName: \"kubernetes.io/projected/131d3017-57bd-4d6c-982c-7941d55822b9-kube-api-access-n8gsd\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.854026 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bfcb233-910d-4200-a46c-f6d350f0e252-kube-api-access-rdzh5" (OuterVolumeSpecName: "kube-api-access-rdzh5") pod "5bfcb233-910d-4200-a46c-f6d350f0e252" (UID: "5bfcb233-910d-4200-a46c-f6d350f0e252"). InnerVolumeSpecName "kube-api-access-rdzh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.857135 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eca580fb-8c72-4c1d-a917-e1e6a614e3cb-kube-api-access-lzgps" (OuterVolumeSpecName: "kube-api-access-lzgps") pod "eca580fb-8c72-4c1d-a917-e1e6a614e3cb" (UID: "eca580fb-8c72-4c1d-a917-e1e6a614e3cb"). InnerVolumeSpecName "kube-api-access-lzgps". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.953999 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdzh5\" (UniqueName: \"kubernetes.io/projected/5bfcb233-910d-4200-a46c-f6d350f0e252-kube-api-access-rdzh5\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:43 crc kubenswrapper[4689]: I1013 21:26:43.954190 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzgps\" (UniqueName: \"kubernetes.io/projected/eca580fb-8c72-4c1d-a917-e1e6a614e3cb-kube-api-access-lzgps\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:44 crc kubenswrapper[4689]: I1013 21:26:44.731634 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"fad8b92dfc880d2350bf9834a57f5e5545f58a0745a7170926da5136db0e8d52"} Oct 13 21:26:44 crc kubenswrapper[4689]: I1013 21:26:44.732033 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"e25d6309ec30c1fdfbade90d10a0aab80239023c610f2349c8b4e6643b648b4c"} Oct 13 21:26:44 crc kubenswrapper[4689]: I1013 21:26:44.732043 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"19c1a2599ad6ee904249b52128bc797d0e1bcc59f98cf739a411530f411673ed"} Oct 13 21:26:44 crc kubenswrapper[4689]: I1013 21:26:44.737078 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nlsl4" Oct 13 21:26:44 crc kubenswrapper[4689]: I1013 21:26:44.738778 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2vmxt" event={"ID":"ca29831b-9c1d-4c38-bf2a-c17e40542a0c","Type":"ContainerStarted","Data":"59182b453705fbe6f4eda75e4f5bdc05616128de25d100706b8a4a962f9a6036"} Oct 13 21:26:44 crc kubenswrapper[4689]: I1013 21:26:44.783309 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-2vmxt" podStartSLOduration=2.775013606 podStartE2EDuration="7.783288289s" podCreationTimestamp="2025-10-13 21:26:37 +0000 UTC" firstStartedPulling="2025-10-13 21:26:38.510098045 +0000 UTC m=+915.428343130" lastFinishedPulling="2025-10-13 21:26:43.518372698 +0000 UTC m=+920.436617813" observedRunningTime="2025-10-13 21:26:44.758234468 +0000 UTC m=+921.676479553" watchObservedRunningTime="2025-10-13 21:26:44.783288289 +0000 UTC m=+921.701533374" Oct 13 21:26:45 crc kubenswrapper[4689]: I1013 21:26:45.751525 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"79aa15413c79934a2f59bd28cae3c28d4f0dcfb8370bb4afb12039377cf6dd03"} Oct 13 21:26:46 crc kubenswrapper[4689]: E1013 21:26:46.589773 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca29831b_9c1d_4c38_bf2a_c17e40542a0c.slice/crio-conmon-59182b453705fbe6f4eda75e4f5bdc05616128de25d100706b8a4a962f9a6036.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:26:46 crc kubenswrapper[4689]: I1013 21:26:46.793789 4689 generic.go:334] "Generic (PLEG): container finished" podID="ca29831b-9c1d-4c38-bf2a-c17e40542a0c" 
containerID="59182b453705fbe6f4eda75e4f5bdc05616128de25d100706b8a4a962f9a6036" exitCode=0 Oct 13 21:26:46 crc kubenswrapper[4689]: I1013 21:26:46.794266 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2vmxt" event={"ID":"ca29831b-9c1d-4c38-bf2a-c17e40542a0c","Type":"ContainerDied","Data":"59182b453705fbe6f4eda75e4f5bdc05616128de25d100706b8a4a962f9a6036"} Oct 13 21:26:46 crc kubenswrapper[4689]: I1013 21:26:46.814380 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"71891365ee69f777c7dc1f94663c408ea3e60db18a43ca76fef51a0bbe5f67f0"} Oct 13 21:26:46 crc kubenswrapper[4689]: I1013 21:26:46.814441 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"b85fa0021d531bd92da2b8d6252430584f4b02d83fedc467cb35629fc8025dc2"} Oct 13 21:26:46 crc kubenswrapper[4689]: I1013 21:26:46.814453 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"38d6dfdee7523f29738bda75da2c62aa056a0c0ff748a6fed2e839dbc224e7ac"} Oct 13 21:26:47 crc kubenswrapper[4689]: I1013 21:26:47.850271 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"3b38469be91fb852bef4fdf2c421754afaea0e6c0d8dd887c7b177270ec9636d"} Oct 13 21:26:47 crc kubenswrapper[4689]: I1013 21:26:47.850331 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"2e492de2ecabf5ba10f5ac86e9d02ff444cc7e96f45ea177842a528e86475887"} Oct 13 21:26:47 crc kubenswrapper[4689]: I1013 21:26:47.850344 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"011d4e5ba9cff84c30c75e8e55f4bd3df3be03d36ff179b75aff3c95e8d1ede4"} Oct 13 21:26:47 crc kubenswrapper[4689]: I1013 21:26:47.850356 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5","Type":"ContainerStarted","Data":"e09e5d411b48cdc91b93dd956911daf82273378c03d8493e3943eae32c8bd319"} Oct 13 21:26:47 crc kubenswrapper[4689]: I1013 21:26:47.857343 4689 generic.go:334] "Generic (PLEG): container finished" podID="ea77789b-b65d-4659-9169-ef628cda1bc1" containerID="9e23783a9bd750e056498c4c377674ea9ba8f78c712ebe2d94862af2489fb4b6" exitCode=0 Oct 13 21:26:47 crc kubenswrapper[4689]: I1013 21:26:47.857813 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hm6gs" event={"ID":"ea77789b-b65d-4659-9169-ef628cda1bc1","Type":"ContainerDied","Data":"9e23783a9bd750e056498c4c377674ea9ba8f78c712ebe2d94862af2489fb4b6"} Oct 13 21:26:47 crc kubenswrapper[4689]: I1013 21:26:47.921938 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=39.434762431 podStartE2EDuration="46.921916014s" podCreationTimestamp="2025-10-13 21:26:01 +0000 UTC" firstStartedPulling="2025-10-13 21:26:38.555835374 +0000 UTC m=+915.474080459" lastFinishedPulling="2025-10-13 21:26:46.042988917 +0000 UTC m=+922.961234042" observedRunningTime="2025-10-13 
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.176400 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jb49q"]
Oct 13 21:26:48 crc kubenswrapper[4689]: E1013 21:26:48.177113 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d868715-fd50-4fdc-a8c9-77d82739beb6" containerName="ovn-config"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177126 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d868715-fd50-4fdc-a8c9-77d82739beb6" containerName="ovn-config"
Oct 13 21:26:48 crc kubenswrapper[4689]: E1013 21:26:48.177145 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="131d3017-57bd-4d6c-982c-7941d55822b9" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177150 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="131d3017-57bd-4d6c-982c-7941d55822b9" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: E1013 21:26:48.177163 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bfcb233-910d-4200-a46c-f6d350f0e252" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177171 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bfcb233-910d-4200-a46c-f6d350f0e252" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: E1013 21:26:48.177188 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca580fb-8c72-4c1d-a917-e1e6a614e3cb" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177194 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca580fb-8c72-4c1d-a917-e1e6a614e3cb" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177355 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d868715-fd50-4fdc-a8c9-77d82739beb6" containerName="ovn-config"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177375 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca580fb-8c72-4c1d-a917-e1e6a614e3cb" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177390 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bfcb233-910d-4200-a46c-f6d350f0e252" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.177409 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="131d3017-57bd-4d6c-982c-7941d55822b9" containerName="mariadb-database-create"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.178221 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.180141 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.213556 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jb49q"]
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.216687 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2vmxt"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.246758 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.246829 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dxfc\" (UniqueName: \"kubernetes.io/projected/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-kube-api-access-9dxfc\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.246890 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.246956 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-config\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.246990 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-svc\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.247029 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348115 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbz8k\" (UniqueName: \"kubernetes.io/projected/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-kube-api-access-pbz8k\") pod \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") "
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348212 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-config-data\") pod \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") "
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348344 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-combined-ca-bundle\") pod \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\" (UID: \"ca29831b-9c1d-4c38-bf2a-c17e40542a0c\") "
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348671 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348715 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dxfc\" (UniqueName: \"kubernetes.io/projected/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-kube-api-access-9dxfc\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348777 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348834 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-config\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348866 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-svc\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.348894 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.349887 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.350053 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-svc\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.350093 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.350482 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.350666 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-config\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.357522 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-kube-api-access-pbz8k" (OuterVolumeSpecName: "kube-api-access-pbz8k") pod "ca29831b-9c1d-4c38-bf2a-c17e40542a0c" (UID: "ca29831b-9c1d-4c38-bf2a-c17e40542a0c"). InnerVolumeSpecName "kube-api-access-pbz8k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.370202 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dxfc\" (UniqueName: \"kubernetes.io/projected/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-kube-api-access-9dxfc\") pod \"dnsmasq-dns-764c5664d7-jb49q\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.395442 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-config-data" (OuterVolumeSpecName: "config-data") pod "ca29831b-9c1d-4c38-bf2a-c17e40542a0c" (UID: "ca29831b-9c1d-4c38-bf2a-c17e40542a0c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.397856 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca29831b-9c1d-4c38-bf2a-c17e40542a0c" (UID: "ca29831b-9c1d-4c38-bf2a-c17e40542a0c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.451632 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.452210 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.452229 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbz8k\" (UniqueName: \"kubernetes.io/projected/ca29831b-9c1d-4c38-bf2a-c17e40542a0c-kube-api-access-pbz8k\") on node \"crc\" DevicePath \"\""
Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.539639 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jb49q"
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jb49q" Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.868650 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2vmxt" event={"ID":"ca29831b-9c1d-4c38-bf2a-c17e40542a0c","Type":"ContainerDied","Data":"9dfba7112fdb1860ccf32e8ed0a014b6d28a4308afa6128f3095a726ea7149fa"} Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.868718 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dfba7112fdb1860ccf32e8ed0a014b6d28a4308afa6128f3095a726ea7149fa" Oct 13 21:26:48 crc kubenswrapper[4689]: I1013 21:26:48.868722 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2vmxt" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.046487 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jb49q"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.057403 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-9tbs2"] Oct 13 21:26:49 crc kubenswrapper[4689]: E1013 21:26:49.057761 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca29831b-9c1d-4c38-bf2a-c17e40542a0c" containerName="keystone-db-sync" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.057774 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca29831b-9c1d-4c38-bf2a-c17e40542a0c" containerName="keystone-db-sync" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.061841 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca29831b-9c1d-4c38-bf2a-c17e40542a0c" containerName="keystone-db-sync" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.062656 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: W1013 21:26:49.065181 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c69fc2c_bcc7_486a_80d0_571311fb6ae2.slice/crio-ce3ab67e6b37e6c71f960d5d2d51ccadb7f291996e546bd2897149a197b5a4cf WatchSource:0}: Error finding container ce3ab67e6b37e6c71f960d5d2d51ccadb7f291996e546bd2897149a197b5a4cf: Status 404 returned error can't find the container with id ce3ab67e6b37e6c71f960d5d2d51ccadb7f291996e546bd2897149a197b5a4cf Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.065259 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.066935 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.067238 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dqmvt" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.067269 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.076925 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9tbs2"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.092373 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jb49q"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.155061 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fgdhv"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.157068 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.179882 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-credential-keys\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.179939 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-combined-ca-bundle\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.179997 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-scripts\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.180033 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wsts\" (UniqueName: \"kubernetes.io/projected/ea475aa5-1f63-4031-81b6-8144283242e7-kube-api-access-6wsts\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.180051 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-config-data\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.180089 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-fernet-keys\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.190208 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fgdhv"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.238824 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-c5756884f-lw5tc"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.241753 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.245165 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.245381 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.245543 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.248484 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-t9j4c" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.279997 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c5756884f-lw5tc"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.281226 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-svc\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.281255 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46kfc\" (UniqueName: \"kubernetes.io/projected/340d2e01-fe78-435d-a002-5e254ad34204-kube-api-access-46kfc\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.281278 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wsts\" (UniqueName: \"kubernetes.io/projected/ea475aa5-1f63-4031-81b6-8144283242e7-kube-api-access-6wsts\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.281300 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-config-data\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.281653 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.281846 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-fernet-keys\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.282016 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-swift-storage-0\") pod 
\"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.282139 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-config\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.282226 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-credential-keys\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.282345 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-combined-ca-bundle\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.282606 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-scripts\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.283000 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.285909 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-credential-keys\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.286188 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-config-data\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.304836 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-fernet-keys\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.305515 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-combined-ca-bundle\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 
crc kubenswrapper[4689]: I1013 21:26:49.306260 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-scripts\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.334508 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wsts\" (UniqueName: \"kubernetes.io/projected/ea475aa5-1f63-4031-81b6-8144283242e7-kube-api-access-6wsts\") pod \"keystone-bootstrap-9tbs2\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386635 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386696 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vstqc\" (UniqueName: \"kubernetes.io/projected/5c0aa883-db89-41fe-b956-cbb3994efcc7-kube-api-access-vstqc\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386760 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386800 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c0aa883-db89-41fe-b956-cbb3994efcc7-logs\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386818 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-config\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386842 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c0aa883-db89-41fe-b956-cbb3994efcc7-horizon-secret-key\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386856 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-scripts\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386907 4689 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-config-data\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386925 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386956 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-svc\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.386971 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46kfc\" (UniqueName: \"kubernetes.io/projected/340d2e01-fe78-435d-a002-5e254ad34204-kube-api-access-46kfc\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.387740 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.388066 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.388526 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-config\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.391431 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.397052 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-svc\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.414453 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46kfc\" (UniqueName: 
\"kubernetes.io/projected/340d2e01-fe78-435d-a002-5e254ad34204-kube-api-access-46kfc\") pod \"dnsmasq-dns-5959f8865f-fgdhv\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.414979 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.488551 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c0aa883-db89-41fe-b956-cbb3994efcc7-horizon-secret-key\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.488618 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-scripts\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.488702 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-config-data\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.488764 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vstqc\" (UniqueName: \"kubernetes.io/projected/5c0aa883-db89-41fe-b956-cbb3994efcc7-kube-api-access-vstqc\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.488805 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c0aa883-db89-41fe-b956-cbb3994efcc7-logs\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.489173 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c0aa883-db89-41fe-b956-cbb3994efcc7-logs\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.491025 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-config-data\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.491418 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-scripts\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.496542 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/5c0aa883-db89-41fe-b956-cbb3994efcc7-horizon-secret-key\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.499509 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.509664 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-776df869fc-xvpk7"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.518355 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vstqc\" (UniqueName: \"kubernetes.io/projected/5c0aa883-db89-41fe-b956-cbb3994efcc7-kube-api-access-vstqc\") pod \"horizon-c5756884f-lw5tc\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") " pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.519930 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.574055 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-776df869fc-xvpk7"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.596731 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-horizon-secret-key\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.597173 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcdqw\" (UniqueName: \"kubernetes.io/projected/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-kube-api-access-lcdqw\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.597270 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-logs\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.597324 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-config-data\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.597379 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-scripts\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.597499 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fgdhv"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.608049 4689 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/placement-db-sync-4cb78"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.617072 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.623067 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.625596 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.630078 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.630294 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.630457 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-bxgvc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.634732 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.634905 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.635405 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.649074 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4cb78"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.675960 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.685673 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-v8rhs"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.688286 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.697852 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hm6gs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698771 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-config-data\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698810 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfx2w\" (UniqueName: \"kubernetes.io/projected/a37ccb01-d27b-43e7-bf7d-902a962053f4-kube-api-access-kfx2w\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698829 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-log-httpd\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698866 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698891 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-scripts\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698910 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-horizon-secret-key\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698929 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-scripts\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698945 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.698978 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-run-httpd\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699007 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq48s\" (UniqueName: \"kubernetes.io/projected/7a541987-5ad6-4f2f-b625-5b8105b669e5-kube-api-access-cq48s\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699028 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcdqw\" (UniqueName: \"kubernetes.io/projected/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-kube-api-access-lcdqw\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699072 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-config-data\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699090 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-config-data\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699105 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-logs\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699121 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-scripts\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699140 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-combined-ca-bundle\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.699163 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a541987-5ad6-4f2f-b625-5b8105b669e5-logs\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.700260 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-config-data\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.701324 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-scripts\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.701569 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-logs\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.712846 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-horizon-secret-key\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.717776 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-v8rhs"] Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.728615 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcdqw\" (UniqueName: \"kubernetes.io/projected/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-kube-api-access-lcdqw\") pod \"horizon-776df869fc-xvpk7\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") " pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.802979 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzrb9\" (UniqueName: \"kubernetes.io/projected/ea77789b-b65d-4659-9169-ef628cda1bc1-kube-api-access-fzrb9\") pod \"ea77789b-b65d-4659-9169-ef628cda1bc1\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.803228 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-combined-ca-bundle\") pod \"ea77789b-b65d-4659-9169-ef628cda1bc1\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.803308 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-db-sync-config-data\") pod \"ea77789b-b65d-4659-9169-ef628cda1bc1\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.803358 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-config-data\") pod \"ea77789b-b65d-4659-9169-ef628cda1bc1\" (UID: \"ea77789b-b65d-4659-9169-ef628cda1bc1\") " Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.803826 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-run-httpd\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.803861 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-nb\") pod 
\"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.803909 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-config\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.803943 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq48s\" (UniqueName: \"kubernetes.io/projected/7a541987-5ad6-4f2f-b625-5b8105b669e5-kube-api-access-cq48s\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804509 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804556 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-config-data\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804606 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-config-data\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804631 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-scripts\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804653 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-combined-ca-bundle\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804689 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a541987-5ad6-4f2f-b625-5b8105b669e5-logs\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804746 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zgq4\" (UniqueName: \"kubernetes.io/projected/05120f64-2a9b-40bc-9c46-4a90d628cd35-kube-api-access-4zgq4\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 
21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804884 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfx2w\" (UniqueName: \"kubernetes.io/projected/a37ccb01-d27b-43e7-bf7d-902a962053f4-kube-api-access-kfx2w\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804924 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-log-httpd\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804948 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.804983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.805032 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-scripts\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.805049 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.805270 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.809155 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-run-httpd\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.810147 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-log-httpd\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.812896 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-config-data\") pod 
\"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.815200 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-config-data\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.817444 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a541987-5ad6-4f2f-b625-5b8105b669e5-logs\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.819872 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.820396 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea77789b-b65d-4659-9169-ef628cda1bc1-kube-api-access-fzrb9" (OuterVolumeSpecName: "kube-api-access-fzrb9") pod "ea77789b-b65d-4659-9169-ef628cda1bc1" (UID: "ea77789b-b65d-4659-9169-ef628cda1bc1"). InnerVolumeSpecName "kube-api-access-fzrb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.830715 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.840144 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-combined-ca-bundle\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.842822 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ea77789b-b65d-4659-9169-ef628cda1bc1" (UID: "ea77789b-b65d-4659-9169-ef628cda1bc1"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.855519 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-scripts\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.865249 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfx2w\" (UniqueName: \"kubernetes.io/projected/a37ccb01-d27b-43e7-bf7d-902a962053f4-kube-api-access-kfx2w\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.867294 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-scripts\") pod \"ceilometer-0\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " pod="openstack/ceilometer-0" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.882326 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq48s\" (UniqueName: \"kubernetes.io/projected/7a541987-5ad6-4f2f-b625-5b8105b669e5-kube-api-access-cq48s\") pod \"placement-db-sync-4cb78\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907249 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zgq4\" (UniqueName: \"kubernetes.io/projected/05120f64-2a9b-40bc-9c46-4a90d628cd35-kube-api-access-4zgq4\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907320 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907354 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907398 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907419 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-config\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907466 4689 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907542 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzrb9\" (UniqueName: \"kubernetes.io/projected/ea77789b-b65d-4659-9169-ef628cda1bc1-kube-api-access-fzrb9\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.907557 4689 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.908606 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.911411 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.916720 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.918580 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hm6gs" event={"ID":"ea77789b-b65d-4659-9169-ef628cda1bc1","Type":"ContainerDied","Data":"621c0c0d4d465a67cd717c16e3634a9ec20592a26b477b3d5df102de9646476f"} Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.918639 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="621c0c0d4d465a67cd717c16e3634a9ec20592a26b477b3d5df102de9646476f" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.918701 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hm6gs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.918796 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-config\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.925637 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.931889 4689 generic.go:334] "Generic (PLEG): container finished" podID="6c69fc2c-bcc7-486a-80d0-571311fb6ae2" containerID="a74a036c0061c61e77657c9efdccd9d0549f7f9d89703168f6e763e1207971d1" exitCode=0 Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.931946 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-jb49q" event={"ID":"6c69fc2c-bcc7-486a-80d0-571311fb6ae2","Type":"ContainerDied","Data":"a74a036c0061c61e77657c9efdccd9d0549f7f9d89703168f6e763e1207971d1"} Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.931988 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-jb49q" event={"ID":"6c69fc2c-bcc7-486a-80d0-571311fb6ae2","Type":"ContainerStarted","Data":"ce3ab67e6b37e6c71f960d5d2d51ccadb7f291996e546bd2897149a197b5a4cf"} Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.957272 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea77789b-b65d-4659-9169-ef628cda1bc1" (UID: "ea77789b-b65d-4659-9169-ef628cda1bc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.958977 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zgq4\" (UniqueName: \"kubernetes.io/projected/05120f64-2a9b-40bc-9c46-4a90d628cd35-kube-api-access-4zgq4\") pod \"dnsmasq-dns-58dd9ff6bc-v8rhs\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.970922 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-config-data" (OuterVolumeSpecName: "config-data") pod "ea77789b-b65d-4659-9169-ef628cda1bc1" (UID: "ea77789b-b65d-4659-9169-ef628cda1bc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:49 crc kubenswrapper[4689]: I1013 21:26:49.990942 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.011498 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.011562 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea77789b-b65d-4659-9169-ef628cda1bc1-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.021947 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4cb78" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.028092 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.056965 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.064410 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9tbs2"] Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.303431 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fgdhv"] Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.318878 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-v8rhs"] Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.487415 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-dgjln"] Oct 13 21:26:50 crc kubenswrapper[4689]: E1013 21:26:50.488294 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea77789b-b65d-4659-9169-ef628cda1bc1" containerName="glance-db-sync" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.488307 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea77789b-b65d-4659-9169-ef628cda1bc1" containerName="glance-db-sync" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.488497 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea77789b-b65d-4659-9169-ef628cda1bc1" containerName="glance-db-sync" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.489354 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.526014 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c5756884f-lw5tc"] Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.547719 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-dgjln"] Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.592464 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.611011 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g982t\" (UniqueName: \"kubernetes.io/projected/28327f70-7d3e-489e-8c99-85d7d4716534-kube-api-access-g982t\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.611137 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.611488 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.611537 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-config\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.611624 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.714623 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.715017 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-config\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " 
pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.715053 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.715087 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.715121 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g982t\" (UniqueName: \"kubernetes.io/projected/28327f70-7d3e-489e-8c99-85d7d4716534-kube-api-access-g982t\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.715153 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.716268 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.725504 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.725926 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.726380 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.772179 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-config\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 
21:26:50.795604 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-776df869fc-xvpk7"] Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.800659 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g982t\" (UniqueName: \"kubernetes.io/projected/28327f70-7d3e-489e-8c99-85d7d4716534-kube-api-access-g982t\") pod \"dnsmasq-dns-785d8bcb8c-dgjln\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.871219 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jb49q" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.919188 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-sb\") pod \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.919237 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-swift-storage-0\") pod \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.919327 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-config\") pod \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.919378 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-nb\") pod \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.919472 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-svc\") pod \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.919498 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dxfc\" (UniqueName: \"kubernetes.io/projected/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-kube-api-access-9dxfc\") pod \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\" (UID: \"6c69fc2c-bcc7-486a-80d0-571311fb6ae2\") " Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.930324 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-kube-api-access-9dxfc" (OuterVolumeSpecName: "kube-api-access-9dxfc") pod "6c69fc2c-bcc7-486a-80d0-571311fb6ae2" (UID: "6c69fc2c-bcc7-486a-80d0-571311fb6ae2"). InnerVolumeSpecName "kube-api-access-9dxfc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.951123 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-776df869fc-xvpk7" event={"ID":"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f","Type":"ContainerStarted","Data":"be623f115eb710961baec56bcc93397a8897304e2d431339f66e217e627be345"} Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.965488 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" event={"ID":"340d2e01-fe78-435d-a002-5e254ad34204","Type":"ContainerStarted","Data":"9f60623f3da349f37dd70f56f1ebddf2ca6825b898e1e86b0212fc6486f10af8"} Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.968099 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-jb49q" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.968108 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-jb49q" event={"ID":"6c69fc2c-bcc7-486a-80d0-571311fb6ae2","Type":"ContainerDied","Data":"ce3ab67e6b37e6c71f960d5d2d51ccadb7f291996e546bd2897149a197b5a4cf"} Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.968183 4689 scope.go:117] "RemoveContainer" containerID="a74a036c0061c61e77657c9efdccd9d0549f7f9d89703168f6e763e1207971d1" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.968835 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6c69fc2c-bcc7-486a-80d0-571311fb6ae2" (UID: "6c69fc2c-bcc7-486a-80d0-571311fb6ae2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.971942 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9tbs2" event={"ID":"ea475aa5-1f63-4031-81b6-8144283242e7","Type":"ContainerStarted","Data":"f375453b2357513c49efb6de42a774c3abf7e630ce859dc496580e4d8f9d2618"} Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.973940 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-config" (OuterVolumeSpecName: "config") pod "6c69fc2c-bcc7-486a-80d0-571311fb6ae2" (UID: "6c69fc2c-bcc7-486a-80d0-571311fb6ae2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.976514 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6c69fc2c-bcc7-486a-80d0-571311fb6ae2" (UID: "6c69fc2c-bcc7-486a-80d0-571311fb6ae2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.976663 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c5756884f-lw5tc" event={"ID":"5c0aa883-db89-41fe-b956-cbb3994efcc7","Type":"ContainerStarted","Data":"582a544cb31d5584c9045318e61cebbb31bf96375b21226ac87210521f1392a4"} Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.988891 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6c69fc2c-bcc7-486a-80d0-571311fb6ae2" (UID: "6c69fc2c-bcc7-486a-80d0-571311fb6ae2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:50 crc kubenswrapper[4689]: I1013 21:26:50.994475 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6c69fc2c-bcc7-486a-80d0-571311fb6ae2" (UID: "6c69fc2c-bcc7-486a-80d0-571311fb6ae2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.021156 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.021192 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dxfc\" (UniqueName: \"kubernetes.io/projected/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-kube-api-access-9dxfc\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.021209 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.021218 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.021227 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.021238 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6c69fc2c-bcc7-486a-80d0-571311fb6ae2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.075199 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.094380 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:26:51 crc kubenswrapper[4689]: W1013 21:26:51.119003 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda37ccb01_d27b_43e7_bf7d_902a962053f4.slice/crio-02bf9fbb4b3a9ae0b1c38fd211e9d57319e4a6b45701c17d3359dca2922e4c38 WatchSource:0}: Error finding container 02bf9fbb4b3a9ae0b1c38fd211e9d57319e4a6b45701c17d3359dca2922e4c38: Status 404 returned error can't find the container with id 02bf9fbb4b3a9ae0b1c38fd211e9d57319e4a6b45701c17d3359dca2922e4c38 Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.121028 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-4cb78"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.216298 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:26:51 crc kubenswrapper[4689]: E1013 21:26:51.216909 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c69fc2c-bcc7-486a-80d0-571311fb6ae2" containerName="init" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.216928 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c69fc2c-bcc7-486a-80d0-571311fb6ae2" containerName="init" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.217142 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c69fc2c-bcc7-486a-80d0-571311fb6ae2" containerName="init" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.218162 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.223463 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.223830 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.223980 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9jnhx" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.249960 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-v8rhs"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.269562 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.381219 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jb49q"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.390980 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-jb49q"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.436422 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8r2n\" (UniqueName: \"kubernetes.io/projected/e0a2c0f5-6c89-4f10-8eba-623190fd162b-kube-api-access-m8r2n\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.436480 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-logs\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.436538 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.438104 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.441475 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-config-data\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.441609 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-scripts\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.441702 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.553237 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-scripts\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.553341 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.553432 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8r2n\" (UniqueName: \"kubernetes.io/projected/e0a2c0f5-6c89-4f10-8eba-623190fd162b-kube-api-access-m8r2n\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.553478 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-logs\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.553793 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.553868 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.554052 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-config-data\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.554893 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-logs\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.555171 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.555630 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.555711 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.557674 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.562068 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.574083 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.574652 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-scripts\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.580618 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.594579 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-config-data\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.603849 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8r2n\" (UniqueName: \"kubernetes.io/projected/e0a2c0f5-6c89-4f10-8eba-623190fd162b-kube-api-access-m8r2n\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.624430 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.655964 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw4t2\" (UniqueName: \"kubernetes.io/projected/37236ceb-29ab-4e07-9e6f-6642dbed073c-kube-api-access-mw4t2\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.656013 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-logs\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.656064 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc 
kubenswrapper[4689]: I1013 21:26:51.656130 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.656228 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.656383 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.656417 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.660887 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-dgjln"] Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.757523 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.758166 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.758189 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.758887 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw4t2\" (UniqueName: \"kubernetes.io/projected/37236ceb-29ab-4e07-9e6f-6642dbed073c-kube-api-access-mw4t2\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.758912 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.758938 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.758974 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.760228 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.760270 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-logs\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.758246 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.763249 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.764489 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.764909 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.774705 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw4t2\" (UniqueName: \"kubernetes.io/projected/37236ceb-29ab-4e07-9e6f-6642dbed073c-kube-api-access-mw4t2\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" 
Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.796241 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.866278 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.888383 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c69fc2c-bcc7-486a-80d0-571311fb6ae2" path="/var/lib/kubelet/pods/6c69fc2c-bcc7-486a-80d0-571311fb6ae2/volumes" Oct 13 21:26:51 crc kubenswrapper[4689]: I1013 21:26:51.961228 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.049145 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9tbs2" event={"ID":"ea475aa5-1f63-4031-81b6-8144283242e7","Type":"ContainerStarted","Data":"0521ae12e45071004a2a2c5f88eae109b038af98b43728296a634db0b47efd07"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.066300 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" event={"ID":"28327f70-7d3e-489e-8c99-85d7d4716534","Type":"ContainerStarted","Data":"bf132da6b42d038f48fa05919ce8e6d608c1c62a42bd3b7e30908a2c38fd99bd"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.066382 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" event={"ID":"28327f70-7d3e-489e-8c99-85d7d4716534","Type":"ContainerStarted","Data":"c3cee51d9c5b156a6ba4742b89c2515a272613ce71294b7e5952fbe023e28321"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.073140 4689 generic.go:334] "Generic (PLEG): container finished" podID="340d2e01-fe78-435d-a002-5e254ad34204" containerID="49e53bde466ddf9b336d256aa5988614959cc54111c5dd72914c18b08c455da3" exitCode=0 Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.073215 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" event={"ID":"340d2e01-fe78-435d-a002-5e254ad34204","Type":"ContainerDied","Data":"49e53bde466ddf9b336d256aa5988614959cc54111c5dd72914c18b08c455da3"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.074485 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-9tbs2" podStartSLOduration=3.074473133 podStartE2EDuration="3.074473133s" podCreationTimestamp="2025-10-13 21:26:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:26:52.069561566 +0000 UTC m=+928.987806651" watchObservedRunningTime="2025-10-13 21:26:52.074473133 +0000 UTC m=+928.992718218" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.116600 4689 generic.go:334] "Generic (PLEG): container finished" podID="05120f64-2a9b-40bc-9c46-4a90d628cd35" containerID="207d4f0e91b89d3a338a67a46ce77ab4f93c33755c724e0250d40046f4e27895" exitCode=0 Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.116708 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" 
event={"ID":"05120f64-2a9b-40bc-9c46-4a90d628cd35","Type":"ContainerDied","Data":"207d4f0e91b89d3a338a67a46ce77ab4f93c33755c724e0250d40046f4e27895"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.116742 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" event={"ID":"05120f64-2a9b-40bc-9c46-4a90d628cd35","Type":"ContainerStarted","Data":"e33c420e9568d4c6295de5fbc9b2f39b3e3cbbedd7093e0e457b2f56677a9a92"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.139478 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4cb78" event={"ID":"7a541987-5ad6-4f2f-b625-5b8105b669e5","Type":"ContainerStarted","Data":"376b6793ff24e324dbb76b37873e298a6384bbead4cda6687e04d756ec286310"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.141729 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerStarted","Data":"02bf9fbb4b3a9ae0b1c38fd211e9d57319e4a6b45701c17d3359dca2922e4c38"} Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.516711 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.590909 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.671690 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-776df869fc-xvpk7"] Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.696001 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-sb\") pod \"05120f64-2a9b-40bc-9c46-4a90d628cd35\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.696056 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-svc\") pod \"05120f64-2a9b-40bc-9c46-4a90d628cd35\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.696117 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-config\") pod \"05120f64-2a9b-40bc-9c46-4a90d628cd35\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.696222 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zgq4\" (UniqueName: \"kubernetes.io/projected/05120f64-2a9b-40bc-9c46-4a90d628cd35-kube-api-access-4zgq4\") pod \"05120f64-2a9b-40bc-9c46-4a90d628cd35\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.696332 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-swift-storage-0\") pod \"05120f64-2a9b-40bc-9c46-4a90d628cd35\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.696378 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-nb\") pod \"05120f64-2a9b-40bc-9c46-4a90d628cd35\" (UID: \"05120f64-2a9b-40bc-9c46-4a90d628cd35\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.712929 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.730781 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05120f64-2a9b-40bc-9c46-4a90d628cd35-kube-api-access-4zgq4" (OuterVolumeSpecName: "kube-api-access-4zgq4") pod "05120f64-2a9b-40bc-9c46-4a90d628cd35" (UID: "05120f64-2a9b-40bc-9c46-4a90d628cd35"). InnerVolumeSpecName "kube-api-access-4zgq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.779128 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "05120f64-2a9b-40bc-9c46-4a90d628cd35" (UID: "05120f64-2a9b-40bc-9c46-4a90d628cd35"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.779314 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-d5fb75fdf-9bkv8"] Oct 13 21:26:52 crc kubenswrapper[4689]: E1013 21:26:52.780127 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05120f64-2a9b-40bc-9c46-4a90d628cd35" containerName="init" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.780144 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="05120f64-2a9b-40bc-9c46-4a90d628cd35" containerName="init" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.780388 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="05120f64-2a9b-40bc-9c46-4a90d628cd35" containerName="init" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.790788 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "05120f64-2a9b-40bc-9c46-4a90d628cd35" (UID: "05120f64-2a9b-40bc-9c46-4a90d628cd35"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.794064 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "05120f64-2a9b-40bc-9c46-4a90d628cd35" (UID: "05120f64-2a9b-40bc-9c46-4a90d628cd35"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.794606 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-config" (OuterVolumeSpecName: "config") pod "05120f64-2a9b-40bc-9c46-4a90d628cd35" (UID: "05120f64-2a9b-40bc-9c46-4a90d628cd35"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.796945 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.801328 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.805634 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.805667 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.805678 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.805723 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.805737 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zgq4\" (UniqueName: \"kubernetes.io/projected/05120f64-2a9b-40bc-9c46-4a90d628cd35-kube-api-access-4zgq4\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.821653 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5fb75fdf-9bkv8"] Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.827480 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "05120f64-2a9b-40bc-9c46-4a90d628cd35" (UID: "05120f64-2a9b-40bc-9c46-4a90d628cd35"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.832695 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.844041 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.908242 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-swift-storage-0\") pod \"340d2e01-fe78-435d-a002-5e254ad34204\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.908398 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-svc\") pod \"340d2e01-fe78-435d-a002-5e254ad34204\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.908431 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-sb\") pod \"340d2e01-fe78-435d-a002-5e254ad34204\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.908458 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-config\") pod \"340d2e01-fe78-435d-a002-5e254ad34204\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.908486 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46kfc\" (UniqueName: \"kubernetes.io/projected/340d2e01-fe78-435d-a002-5e254ad34204-kube-api-access-46kfc\") pod \"340d2e01-fe78-435d-a002-5e254ad34204\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.913138 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-nb\") pod \"340d2e01-fe78-435d-a002-5e254ad34204\" (UID: \"340d2e01-fe78-435d-a002-5e254ad34204\") " Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.913408 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f582acd-f393-4d97-a002-186f0ccb1c86-horizon-secret-key\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.913550 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm24n\" (UniqueName: \"kubernetes.io/projected/5f582acd-f393-4d97-a002-186f0ccb1c86-kube-api-access-lm24n\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.913745 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-scripts\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.913776 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-config-data\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.913791 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f582acd-f393-4d97-a002-186f0ccb1c86-logs\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.913886 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/05120f64-2a9b-40bc-9c46-4a90d628cd35-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.925212 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/340d2e01-fe78-435d-a002-5e254ad34204-kube-api-access-46kfc" (OuterVolumeSpecName: "kube-api-access-46kfc") pod "340d2e01-fe78-435d-a002-5e254ad34204" (UID: "340d2e01-fe78-435d-a002-5e254ad34204"). InnerVolumeSpecName "kube-api-access-46kfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.933843 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "340d2e01-fe78-435d-a002-5e254ad34204" (UID: "340d2e01-fe78-435d-a002-5e254ad34204"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.934571 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "340d2e01-fe78-435d-a002-5e254ad34204" (UID: "340d2e01-fe78-435d-a002-5e254ad34204"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.944241 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "340d2e01-fe78-435d-a002-5e254ad34204" (UID: "340d2e01-fe78-435d-a002-5e254ad34204"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.956476 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-config" (OuterVolumeSpecName: "config") pod "340d2e01-fe78-435d-a002-5e254ad34204" (UID: "340d2e01-fe78-435d-a002-5e254ad34204"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.961488 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "340d2e01-fe78-435d-a002-5e254ad34204" (UID: "340d2e01-fe78-435d-a002-5e254ad34204"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:26:52 crc kubenswrapper[4689]: I1013 21:26:52.972723 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.016444 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-config-data\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.016518 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f582acd-f393-4d97-a002-186f0ccb1c86-logs\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.016713 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f582acd-f393-4d97-a002-186f0ccb1c86-horizon-secret-key\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.016933 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm24n\" (UniqueName: \"kubernetes.io/projected/5f582acd-f393-4d97-a002-186f0ccb1c86-kube-api-access-lm24n\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.017146 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-scripts\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.017204 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.017217 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.017231 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.017242 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46kfc\" (UniqueName: \"kubernetes.io/projected/340d2e01-fe78-435d-a002-5e254ad34204-kube-api-access-46kfc\") on node \"crc\" 
DevicePath \"\"" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.017253 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.017264 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/340d2e01-fe78-435d-a002-5e254ad34204-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.020350 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-config-data\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.020753 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f582acd-f393-4d97-a002-186f0ccb1c86-logs\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.023762 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-scripts\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.032453 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f582acd-f393-4d97-a002-186f0ccb1c86-horizon-secret-key\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.047747 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm24n\" (UniqueName: \"kubernetes.io/projected/5f582acd-f393-4d97-a002-186f0ccb1c86-kube-api-access-lm24n\") pod \"horizon-d5fb75fdf-9bkv8\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") " pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.144177 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.171665 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37236ceb-29ab-4e07-9e6f-6642dbed073c","Type":"ContainerStarted","Data":"6718872e6d207f762ee8a1638054495ec2f1e3b084d2d1a8fe05fbba6936d3d7"} Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.175556 4689 generic.go:334] "Generic (PLEG): container finished" podID="28327f70-7d3e-489e-8c99-85d7d4716534" containerID="bf132da6b42d038f48fa05919ce8e6d608c1c62a42bd3b7e30908a2c38fd99bd" exitCode=0 Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.175636 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" event={"ID":"28327f70-7d3e-489e-8c99-85d7d4716534","Type":"ContainerDied","Data":"bf132da6b42d038f48fa05919ce8e6d608c1c62a42bd3b7e30908a2c38fd99bd"} Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.175663 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" event={"ID":"28327f70-7d3e-489e-8c99-85d7d4716534","Type":"ContainerStarted","Data":"fa96089fee36a380541dfb45c9de73e9efc20c4d03bf05d7180ecce38cfbb864"} Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.178087 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" event={"ID":"340d2e01-fe78-435d-a002-5e254ad34204","Type":"ContainerDied","Data":"9f60623f3da349f37dd70f56f1ebddf2ca6825b898e1e86b0212fc6486f10af8"} Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.178142 4689 scope.go:117] "RemoveContainer" containerID="49e53bde466ddf9b336d256aa5988614959cc54111c5dd72914c18b08c455da3" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.178278 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fgdhv" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.188289 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0a2c0f5-6c89-4f10-8eba-623190fd162b","Type":"ContainerStarted","Data":"109408d8ea12ff35c1b5ca660d7fee17d569b6040e1eb1e19bee7c32b5e0ebc0"} Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.205502 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" event={"ID":"05120f64-2a9b-40bc-9c46-4a90d628cd35","Type":"ContainerDied","Data":"e33c420e9568d4c6295de5fbc9b2f39b3e3cbbedd7093e0e457b2f56677a9a92"} Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.205547 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-v8rhs" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.239477 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fgdhv"] Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.251140 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fgdhv"] Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.252339 4689 scope.go:117] "RemoveContainer" containerID="207d4f0e91b89d3a338a67a46ce77ab4f93c33755c724e0250d40046f4e27895" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.307330 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-v8rhs"] Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.323082 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-v8rhs"] Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.773409 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5fb75fdf-9bkv8"] Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.889602 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05120f64-2a9b-40bc-9c46-4a90d628cd35" path="/var/lib/kubelet/pods/05120f64-2a9b-40bc-9c46-4a90d628cd35/volumes" Oct 13 21:26:53 crc kubenswrapper[4689]: I1013 21:26:53.890563 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="340d2e01-fe78-435d-a002-5e254ad34204" path="/var/lib/kubelet/pods/340d2e01-fe78-435d-a002-5e254ad34204/volumes" Oct 13 21:26:54 crc kubenswrapper[4689]: I1013 21:26:54.222568 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5fb75fdf-9bkv8" event={"ID":"5f582acd-f393-4d97-a002-186f0ccb1c86","Type":"ContainerStarted","Data":"b66f0d0670747c6defbd0fdf098b8770216733b057cfd3d25c963596a949098a"} Oct 13 21:26:55 crc kubenswrapper[4689]: I1013 21:26:55.240299 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0a2c0f5-6c89-4f10-8eba-623190fd162b","Type":"ContainerStarted","Data":"10026754ade7952a531a6c0d31017c5426935f0445c97a451ea36b5a28923c9d"} Oct 13 21:26:55 crc kubenswrapper[4689]: I1013 21:26:55.243624 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37236ceb-29ab-4e07-9e6f-6642dbed073c","Type":"ContainerStarted","Data":"a8f99b34a010cae954d35404b0fc0a17b3f3e7105ab77f4c1923383d7b38bb76"} Oct 13 21:26:55 crc kubenswrapper[4689]: I1013 21:26:55.244211 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.256535 4689 generic.go:334] "Generic (PLEG): container finished" podID="ea475aa5-1f63-4031-81b6-8144283242e7" containerID="0521ae12e45071004a2a2c5f88eae109b038af98b43728296a634db0b47efd07" exitCode=0 Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.256625 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9tbs2" event={"ID":"ea475aa5-1f63-4031-81b6-8144283242e7","Type":"ContainerDied","Data":"0521ae12e45071004a2a2c5f88eae109b038af98b43728296a634db0b47efd07"} Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.282395 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" podStartSLOduration=6.282369703 podStartE2EDuration="6.282369703s" podCreationTimestamp="2025-10-13 21:26:50 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:26:55.272157827 +0000 UTC m=+932.190403062" watchObservedRunningTime="2025-10-13 21:26:56.282369703 +0000 UTC m=+933.200614788" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.786512 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-93c8-account-create-qzmql"] Oct 13 21:26:56 crc kubenswrapper[4689]: E1013 21:26:56.787448 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="340d2e01-fe78-435d-a002-5e254ad34204" containerName="init" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.787465 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="340d2e01-fe78-435d-a002-5e254ad34204" containerName="init" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.787690 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="340d2e01-fe78-435d-a002-5e254ad34204" containerName="init" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.788380 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-93c8-account-create-qzmql" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.790447 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.793041 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-93c8-account-create-qzmql"] Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.882771 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8dc8-account-create-h9cqt"] Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.885052 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8dc8-account-create-h9cqt" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.888223 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.894477 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8dc8-account-create-h9cqt"] Oct 13 21:26:56 crc kubenswrapper[4689]: I1013 21:26:56.905141 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpp2l\" (UniqueName: \"kubernetes.io/projected/06e1fd87-93da-41a7-8e6d-6477bcaa7bec-kube-api-access-kpp2l\") pod \"cinder-93c8-account-create-qzmql\" (UID: \"06e1fd87-93da-41a7-8e6d-6477bcaa7bec\") " pod="openstack/cinder-93c8-account-create-qzmql" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.006697 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k47sp\" (UniqueName: \"kubernetes.io/projected/8e0d19eb-6990-416e-b3cc-3f696c23146a-kube-api-access-k47sp\") pod \"barbican-8dc8-account-create-h9cqt\" (UID: \"8e0d19eb-6990-416e-b3cc-3f696c23146a\") " pod="openstack/barbican-8dc8-account-create-h9cqt" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.006754 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpp2l\" (UniqueName: \"kubernetes.io/projected/06e1fd87-93da-41a7-8e6d-6477bcaa7bec-kube-api-access-kpp2l\") pod \"cinder-93c8-account-create-qzmql\" (UID: \"06e1fd87-93da-41a7-8e6d-6477bcaa7bec\") " pod="openstack/cinder-93c8-account-create-qzmql" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.032733 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpp2l\" (UniqueName: \"kubernetes.io/projected/06e1fd87-93da-41a7-8e6d-6477bcaa7bec-kube-api-access-kpp2l\") pod \"cinder-93c8-account-create-qzmql\" (UID: \"06e1fd87-93da-41a7-8e6d-6477bcaa7bec\") " pod="openstack/cinder-93c8-account-create-qzmql" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.095429 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-2740-account-create-cjdts"] Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.098782 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2740-account-create-cjdts" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.101964 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.113044 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k47sp\" (UniqueName: \"kubernetes.io/projected/8e0d19eb-6990-416e-b3cc-3f696c23146a-kube-api-access-k47sp\") pod \"barbican-8dc8-account-create-h9cqt\" (UID: \"8e0d19eb-6990-416e-b3cc-3f696c23146a\") " pod="openstack/barbican-8dc8-account-create-h9cqt" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.133283 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2740-account-create-cjdts"] Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.158440 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-93c8-account-create-qzmql" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.162481 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k47sp\" (UniqueName: \"kubernetes.io/projected/8e0d19eb-6990-416e-b3cc-3f696c23146a-kube-api-access-k47sp\") pod \"barbican-8dc8-account-create-h9cqt\" (UID: \"8e0d19eb-6990-416e-b3cc-3f696c23146a\") " pod="openstack/barbican-8dc8-account-create-h9cqt" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.209371 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8dc8-account-create-h9cqt" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.215524 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bzcl\" (UniqueName: \"kubernetes.io/projected/25e7989d-06bf-4909-848c-92a353d7be6f-kube-api-access-5bzcl\") pod \"neutron-2740-account-create-cjdts\" (UID: \"25e7989d-06bf-4909-848c-92a353d7be6f\") " pod="openstack/neutron-2740-account-create-cjdts" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.317100 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bzcl\" (UniqueName: \"kubernetes.io/projected/25e7989d-06bf-4909-848c-92a353d7be6f-kube-api-access-5bzcl\") pod \"neutron-2740-account-create-cjdts\" (UID: \"25e7989d-06bf-4909-848c-92a353d7be6f\") " pod="openstack/neutron-2740-account-create-cjdts" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.358811 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bzcl\" (UniqueName: \"kubernetes.io/projected/25e7989d-06bf-4909-848c-92a353d7be6f-kube-api-access-5bzcl\") pod \"neutron-2740-account-create-cjdts\" (UID: \"25e7989d-06bf-4909-848c-92a353d7be6f\") " pod="openstack/neutron-2740-account-create-cjdts" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.432596 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2740-account-create-cjdts" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.857841 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.927703 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-fernet-keys\") pod \"ea475aa5-1f63-4031-81b6-8144283242e7\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.927857 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wsts\" (UniqueName: \"kubernetes.io/projected/ea475aa5-1f63-4031-81b6-8144283242e7-kube-api-access-6wsts\") pod \"ea475aa5-1f63-4031-81b6-8144283242e7\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.927887 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-config-data\") pod \"ea475aa5-1f63-4031-81b6-8144283242e7\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.928026 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-combined-ca-bundle\") pod \"ea475aa5-1f63-4031-81b6-8144283242e7\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.928113 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-scripts\") pod \"ea475aa5-1f63-4031-81b6-8144283242e7\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.928149 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-credential-keys\") pod \"ea475aa5-1f63-4031-81b6-8144283242e7\" (UID: \"ea475aa5-1f63-4031-81b6-8144283242e7\") " Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.931779 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ea475aa5-1f63-4031-81b6-8144283242e7" (UID: "ea475aa5-1f63-4031-81b6-8144283242e7"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.932970 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ea475aa5-1f63-4031-81b6-8144283242e7" (UID: "ea475aa5-1f63-4031-81b6-8144283242e7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.939265 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea475aa5-1f63-4031-81b6-8144283242e7-kube-api-access-6wsts" (OuterVolumeSpecName: "kube-api-access-6wsts") pod "ea475aa5-1f63-4031-81b6-8144283242e7" (UID: "ea475aa5-1f63-4031-81b6-8144283242e7"). InnerVolumeSpecName "kube-api-access-6wsts". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.940643 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-scripts" (OuterVolumeSpecName: "scripts") pod "ea475aa5-1f63-4031-81b6-8144283242e7" (UID: "ea475aa5-1f63-4031-81b6-8144283242e7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.959737 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea475aa5-1f63-4031-81b6-8144283242e7" (UID: "ea475aa5-1f63-4031-81b6-8144283242e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:57 crc kubenswrapper[4689]: I1013 21:26:57.968376 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-config-data" (OuterVolumeSpecName: "config-data") pod "ea475aa5-1f63-4031-81b6-8144283242e7" (UID: "ea475aa5-1f63-4031-81b6-8144283242e7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.030537 4689 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.030949 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wsts\" (UniqueName: \"kubernetes.io/projected/ea475aa5-1f63-4031-81b6-8144283242e7-kube-api-access-6wsts\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.031085 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.031215 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.031347 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.031474 4689 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea475aa5-1f63-4031-81b6-8144283242e7-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.283391 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9tbs2" event={"ID":"ea475aa5-1f63-4031-81b6-8144283242e7","Type":"ContainerDied","Data":"f375453b2357513c49efb6de42a774c3abf7e630ce859dc496580e4d8f9d2618"} Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.283841 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f375453b2357513c49efb6de42a774c3abf7e630ce859dc496580e4d8f9d2618" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.284018 4689 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9tbs2" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.467580 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-9tbs2"] Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.474542 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-9tbs2"] Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.560838 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-w2fqn"] Oct 13 21:26:58 crc kubenswrapper[4689]: E1013 21:26:58.561632 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea475aa5-1f63-4031-81b6-8144283242e7" containerName="keystone-bootstrap" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.561654 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea475aa5-1f63-4031-81b6-8144283242e7" containerName="keystone-bootstrap" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.561855 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea475aa5-1f63-4031-81b6-8144283242e7" containerName="keystone-bootstrap" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.562945 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.565458 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dqmvt" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.566813 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.566834 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.569930 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.573521 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w2fqn"] Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.640951 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-fernet-keys\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.640995 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-config-data\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.641027 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-scripts\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.641074 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-credential-keys\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.641235 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmf2g\" (UniqueName: \"kubernetes.io/projected/73cfa0da-2c68-445e-9d41-c2cee9857776-kube-api-access-qmf2g\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.641262 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-combined-ca-bundle\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.743084 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmf2g\" (UniqueName: \"kubernetes.io/projected/73cfa0da-2c68-445e-9d41-c2cee9857776-kube-api-access-qmf2g\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.743140 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-combined-ca-bundle\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.743184 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-fernet-keys\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.743201 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-config-data\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.743227 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-scripts\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.743276 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-credential-keys\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.752808 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-scripts\") pod 
\"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.753873 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-combined-ca-bundle\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.758461 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-credential-keys\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.760317 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-fernet-keys\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.762373 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-config-data\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.767147 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmf2g\" (UniqueName: \"kubernetes.io/projected/73cfa0da-2c68-445e-9d41-c2cee9857776-kube-api-access-qmf2g\") pod \"keystone-bootstrap-w2fqn\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:58 crc kubenswrapper[4689]: I1013 21:26:58.906940 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:26:59 crc kubenswrapper[4689]: I1013 21:26:59.879241 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea475aa5-1f63-4031-81b6-8144283242e7" path="/var/lib/kubelet/pods/ea475aa5-1f63-4031-81b6-8144283242e7/volumes" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.076815 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.147179 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lwszp"] Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.147522 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-lwszp" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="dnsmasq-dns" containerID="cri-o://831d1039236b73c796e9c9e1839ed94aa2f812b31839892e5ee3c690792476a7" gracePeriod=10 Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.249623 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c5756884f-lw5tc"] Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.285019 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-795fd646bb-2s89l"] Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.287014 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.290178 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.310184 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-795fd646bb-2s89l"] Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.356199 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d5fb75fdf-9bkv8"] Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.398368 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-56b8966ffb-99krc"] Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.399625 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd448066-1b70-4e35-959c-5c702d87560f-logs\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.399718 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-secret-key\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.399778 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-combined-ca-bundle\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.399815 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-config-data\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.399999 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-scripts\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.400312 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-tls-certs\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.400693 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg8t2\" (UniqueName: \"kubernetes.io/projected/fd448066-1b70-4e35-959c-5c702d87560f-kube-api-access-mg8t2\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.402794 
4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.418907 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56b8966ffb-99krc"] Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.502870 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-horizon-secret-key\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.502919 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-combined-ca-bundle\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.502940 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvx6n\" (UniqueName: \"kubernetes.io/projected/d46a395d-e4aa-45cb-85a7-86a43d5d7371-kube-api-access-rvx6n\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.502964 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-combined-ca-bundle\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.502983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46a395d-e4aa-45cb-85a7-86a43d5d7371-config-data\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.503009 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-config-data\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.503092 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-scripts\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.503150 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-tls-certs\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.503185 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a395d-e4aa-45cb-85a7-86a43d5d7371-logs\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.503208 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg8t2\" (UniqueName: \"kubernetes.io/projected/fd448066-1b70-4e35-959c-5c702d87560f-kube-api-access-mg8t2\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.503252 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-horizon-tls-certs\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.504160 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd448066-1b70-4e35-959c-5c702d87560f-logs\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.504237 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd448066-1b70-4e35-959c-5c702d87560f-logs\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.504297 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-secret-key\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.504332 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46a395d-e4aa-45cb-85a7-86a43d5d7371-scripts\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.505023 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-config-data\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.507896 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-scripts\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.513637 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-combined-ca-bundle\") pod \"horizon-795fd646bb-2s89l\" (UID: 
\"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.529928 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-secret-key\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.531025 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-tls-certs\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.532640 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg8t2\" (UniqueName: \"kubernetes.io/projected/fd448066-1b70-4e35-959c-5c702d87560f-kube-api-access-mg8t2\") pod \"horizon-795fd646bb-2s89l\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.606296 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46a395d-e4aa-45cb-85a7-86a43d5d7371-scripts\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.606379 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-horizon-secret-key\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.606407 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-combined-ca-bundle\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.606547 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvx6n\" (UniqueName: \"kubernetes.io/projected/d46a395d-e4aa-45cb-85a7-86a43d5d7371-kube-api-access-rvx6n\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.606567 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46a395d-e4aa-45cb-85a7-86a43d5d7371-config-data\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.607388 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46a395d-e4aa-45cb-85a7-86a43d5d7371-scripts\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 
21:27:01.607736 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a395d-e4aa-45cb-85a7-86a43d5d7371-logs\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.607764 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-horizon-tls-certs\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.608571 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a395d-e4aa-45cb-85a7-86a43d5d7371-logs\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.609611 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46a395d-e4aa-45cb-85a7-86a43d5d7371-config-data\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.610719 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-horizon-secret-key\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.611102 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-horizon-tls-certs\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.612487 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a395d-e4aa-45cb-85a7-86a43d5d7371-combined-ca-bundle\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.624962 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvx6n\" (UniqueName: \"kubernetes.io/projected/d46a395d-e4aa-45cb-85a7-86a43d5d7371-kube-api-access-rvx6n\") pod \"horizon-56b8966ffb-99krc\" (UID: \"d46a395d-e4aa-45cb-85a7-86a43d5d7371\") " pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.629564 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:01 crc kubenswrapper[4689]: I1013 21:27:01.731493 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:02 crc kubenswrapper[4689]: I1013 21:27:02.113856 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-lwszp" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Oct 13 21:27:02 crc kubenswrapper[4689]: I1013 21:27:02.341842 4689 generic.go:334] "Generic (PLEG): container finished" podID="8172cb34-22e4-4710-a9b2-94cb60495469" containerID="831d1039236b73c796e9c9e1839ed94aa2f812b31839892e5ee3c690792476a7" exitCode=0 Oct 13 21:27:02 crc kubenswrapper[4689]: I1013 21:27:02.341915 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lwszp" event={"ID":"8172cb34-22e4-4710-a9b2-94cb60495469","Type":"ContainerDied","Data":"831d1039236b73c796e9c9e1839ed94aa2f812b31839892e5ee3c690792476a7"} Oct 13 21:27:07 crc kubenswrapper[4689]: I1013 21:27:07.114639 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-lwszp" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Oct 13 21:27:07 crc kubenswrapper[4689]: E1013 21:27:07.582367 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Oct 13 21:27:07 crc kubenswrapper[4689]: E1013 21:27:07.582904 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cq48s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-4cb78_openstack(7a541987-5ad6-4f2f-b625-5b8105b669e5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 13 21:27:07 crc kubenswrapper[4689]: E1013 21:27:07.584762 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-4cb78" podUID="7a541987-5ad6-4f2f-b625-5b8105b669e5" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.009212 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lwszp" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.154365 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dt527\" (UniqueName: \"kubernetes.io/projected/8172cb34-22e4-4710-a9b2-94cb60495469-kube-api-access-dt527\") pod \"8172cb34-22e4-4710-a9b2-94cb60495469\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.154433 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-sb\") pod \"8172cb34-22e4-4710-a9b2-94cb60495469\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.154558 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-dns-svc\") pod \"8172cb34-22e4-4710-a9b2-94cb60495469\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.154765 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-config\") pod \"8172cb34-22e4-4710-a9b2-94cb60495469\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.154785 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-nb\") pod \"8172cb34-22e4-4710-a9b2-94cb60495469\" (UID: \"8172cb34-22e4-4710-a9b2-94cb60495469\") " Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.160840 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8172cb34-22e4-4710-a9b2-94cb60495469-kube-api-access-dt527" (OuterVolumeSpecName: "kube-api-access-dt527") pod "8172cb34-22e4-4710-a9b2-94cb60495469" (UID: "8172cb34-22e4-4710-a9b2-94cb60495469"). InnerVolumeSpecName "kube-api-access-dt527". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.239470 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-config" (OuterVolumeSpecName: "config") pod "8172cb34-22e4-4710-a9b2-94cb60495469" (UID: "8172cb34-22e4-4710-a9b2-94cb60495469"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.241426 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8172cb34-22e4-4710-a9b2-94cb60495469" (UID: "8172cb34-22e4-4710-a9b2-94cb60495469"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.246718 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8172cb34-22e4-4710-a9b2-94cb60495469" (UID: "8172cb34-22e4-4710-a9b2-94cb60495469"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.259397 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.259429 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.259442 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dt527\" (UniqueName: \"kubernetes.io/projected/8172cb34-22e4-4710-a9b2-94cb60495469-kube-api-access-dt527\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.259451 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.321784 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8172cb34-22e4-4710-a9b2-94cb60495469" (UID: "8172cb34-22e4-4710-a9b2-94cb60495469"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.361825 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8172cb34-22e4-4710-a9b2-94cb60495469-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.384390 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8dc8-account-create-h9cqt"] Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.405908 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-776df869fc-xvpk7" event={"ID":"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f","Type":"ContainerStarted","Data":"e226370ef3d71605744db19b503788f9615bc782e87834c65b662671f1cc5f23"} Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.420158 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-lwszp" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.420185 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-lwszp" event={"ID":"8172cb34-22e4-4710-a9b2-94cb60495469","Type":"ContainerDied","Data":"0e16f1edf8666a4e36ea5d36a7523406ed9967555dd8561ac4e1f0fc1e6ad710"} Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.420238 4689 scope.go:117] "RemoveContainer" containerID="831d1039236b73c796e9c9e1839ed94aa2f812b31839892e5ee3c690792476a7" Oct 13 21:27:08 crc kubenswrapper[4689]: E1013 21:27:08.421968 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-4cb78" podUID="7a541987-5ad6-4f2f-b625-5b8105b669e5" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.426264 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-93c8-account-create-qzmql"] Oct 13 21:27:08 crc kubenswrapper[4689]: W1013 21:27:08.433733 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e0d19eb_6990_416e_b3cc_3f696c23146a.slice/crio-51d06c179cf0af5bb3d87a28ccf215a3e539bfe7454994965bf7878bf305493d WatchSource:0}: Error finding container 51d06c179cf0af5bb3d87a28ccf215a3e539bfe7454994965bf7878bf305493d: Status 404 returned error can't find the container with id 51d06c179cf0af5bb3d87a28ccf215a3e539bfe7454994965bf7878bf305493d Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.512735 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lwszp"] Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.533605 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-lwszp"] Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.538908 4689 scope.go:117] "RemoveContainer" containerID="6fde0c4e5f8bec9732978d15cfba23d3a4095bc8633a02dde99f73ced0051fd0" Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.541812 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2740-account-create-cjdts"] Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.550775 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56b8966ffb-99krc"] Oct 13 21:27:08 crc kubenswrapper[4689]: W1013 21:27:08.558459 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73cfa0da_2c68_445e_9d41_c2cee9857776.slice/crio-369846f7340beaa0ec4f7cf04fdea4d8321b149c49342b0f29a9dd8c7192778e WatchSource:0}: Error finding container 369846f7340beaa0ec4f7cf04fdea4d8321b149c49342b0f29a9dd8c7192778e: Status 404 returned error can't find the container with id 369846f7340beaa0ec4f7cf04fdea4d8321b149c49342b0f29a9dd8c7192778e Oct 13 21:27:08 crc kubenswrapper[4689]: W1013 21:27:08.559209 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd46a395d_e4aa_45cb_85a7_86a43d5d7371.slice/crio-3c7c05d28a81f9031e09c6e4adc816dc60b568e3e6dd28621d8cbf3e01e21525 WatchSource:0}: Error finding container 3c7c05d28a81f9031e09c6e4adc816dc60b568e3e6dd28621d8cbf3e01e21525: Status 404 returned error can't find the container with id 
3c7c05d28a81f9031e09c6e4adc816dc60b568e3e6dd28621d8cbf3e01e21525 Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.560636 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-795fd646bb-2s89l"] Oct 13 21:27:08 crc kubenswrapper[4689]: W1013 21:27:08.563807 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25e7989d_06bf_4909_848c_92a353d7be6f.slice/crio-e7335f48913199b8d7c6340fba342e4920063d0f824c3ae3602a8557e19b4ab8 WatchSource:0}: Error finding container e7335f48913199b8d7c6340fba342e4920063d0f824c3ae3602a8557e19b4ab8: Status 404 returned error can't find the container with id e7335f48913199b8d7c6340fba342e4920063d0f824c3ae3602a8557e19b4ab8 Oct 13 21:27:08 crc kubenswrapper[4689]: I1013 21:27:08.569075 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w2fqn"] Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.451868 4689 generic.go:334] "Generic (PLEG): container finished" podID="25e7989d-06bf-4909-848c-92a353d7be6f" containerID="04021a95b96427d380934b4cdfcaf8997e9eb782d26f3fd374e21abfba6fdc49" exitCode=0 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.452485 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2740-account-create-cjdts" event={"ID":"25e7989d-06bf-4909-848c-92a353d7be6f","Type":"ContainerDied","Data":"04021a95b96427d380934b4cdfcaf8997e9eb782d26f3fd374e21abfba6fdc49"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.452541 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2740-account-create-cjdts" event={"ID":"25e7989d-06bf-4909-848c-92a353d7be6f","Type":"ContainerStarted","Data":"e7335f48913199b8d7c6340fba342e4920063d0f824c3ae3602a8557e19b4ab8"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.483453 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37236ceb-29ab-4e07-9e6f-6642dbed073c","Type":"ContainerStarted","Data":"7827568eda3987aac4577fbf9b2132240f997c39e8216e41845bf7355f8439a4"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.483646 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-log" containerID="cri-o://a8f99b34a010cae954d35404b0fc0a17b3f3e7105ab77f4c1923383d7b38bb76" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.484189 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-httpd" containerID="cri-o://7827568eda3987aac4577fbf9b2132240f997c39e8216e41845bf7355f8439a4" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.518717 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c5756884f-lw5tc" event={"ID":"5c0aa883-db89-41fe-b956-cbb3994efcc7","Type":"ContainerStarted","Data":"d92b8f3891e2810eea5ec9bba43ca2571fec7d80219635e9e753d63a03b94a13"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.518767 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c5756884f-lw5tc" event={"ID":"5c0aa883-db89-41fe-b956-cbb3994efcc7","Type":"ContainerStarted","Data":"89057d727fd0c0d650ec83c4141afb37e1a87c1cf9a929f2f7a5fd9706691ff1"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.519022 4689 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c5756884f-lw5tc" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon-log" containerID="cri-o://89057d727fd0c0d650ec83c4141afb37e1a87c1cf9a929f2f7a5fd9706691ff1" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.519329 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c5756884f-lw5tc" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon" containerID="cri-o://d92b8f3891e2810eea5ec9bba43ca2571fec7d80219635e9e753d63a03b94a13" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.519806 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=19.519781983 podStartE2EDuration="19.519781983s" podCreationTimestamp="2025-10-13 21:26:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:09.519044765 +0000 UTC m=+946.437289850" watchObservedRunningTime="2025-10-13 21:27:09.519781983 +0000 UTC m=+946.438027068" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.534025 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-795fd646bb-2s89l" event={"ID":"fd448066-1b70-4e35-959c-5c702d87560f","Type":"ContainerStarted","Data":"0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.534090 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-795fd646bb-2s89l" event={"ID":"fd448066-1b70-4e35-959c-5c702d87560f","Type":"ContainerStarted","Data":"ce4aef8ec666f4112fbee29d2094287992ad575dfa2dcb5e6137937f6269eaf0"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.536038 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerStarted","Data":"8cbbfa6c2cf50aa25c9179fc5873db4c670f6fa3395f19d3e44f620a081a4d23"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.538014 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2fqn" event={"ID":"73cfa0da-2c68-445e-9d41-c2cee9857776","Type":"ContainerStarted","Data":"71c8ac33010c29d283dec138712b33674ff75ae4aa042d2ed76733062146f7f9"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.538045 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2fqn" event={"ID":"73cfa0da-2c68-445e-9d41-c2cee9857776","Type":"ContainerStarted","Data":"369846f7340beaa0ec4f7cf04fdea4d8321b149c49342b0f29a9dd8c7192778e"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.548239 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b8966ffb-99krc" event={"ID":"d46a395d-e4aa-45cb-85a7-86a43d5d7371","Type":"ContainerStarted","Data":"887ff983be8ab42945e9763606b8b582101e75c4ea86bc3fed073bbd538194c6"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.548398 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b8966ffb-99krc" event={"ID":"d46a395d-e4aa-45cb-85a7-86a43d5d7371","Type":"ContainerStarted","Data":"3c7c05d28a81f9031e09c6e4adc816dc60b568e3e6dd28621d8cbf3e01e21525"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.554429 4689 generic.go:334] "Generic (PLEG): container finished" podID="06e1fd87-93da-41a7-8e6d-6477bcaa7bec" 
containerID="74c4c5a1c1dbb02b03a9c7932c3d6a7e0c8cd65ebfebdbb1887ceff667a56093" exitCode=0 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.554533 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-93c8-account-create-qzmql" event={"ID":"06e1fd87-93da-41a7-8e6d-6477bcaa7bec","Type":"ContainerDied","Data":"74c4c5a1c1dbb02b03a9c7932c3d6a7e0c8cd65ebfebdbb1887ceff667a56093"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.554566 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-93c8-account-create-qzmql" event={"ID":"06e1fd87-93da-41a7-8e6d-6477bcaa7bec","Type":"ContainerStarted","Data":"95d05e4cfc19ebd04e74c61527b0fca57ae56a4f19bf7b56ee27418ea8f10815"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.564752 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5fb75fdf-9bkv8" event={"ID":"5f582acd-f393-4d97-a002-186f0ccb1c86","Type":"ContainerStarted","Data":"178496e8e51f9fc6b23684c0babc353897234e15ac7624aaaaf2a3bba1211455"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.564824 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5fb75fdf-9bkv8" event={"ID":"5f582acd-f393-4d97-a002-186f0ccb1c86","Type":"ContainerStarted","Data":"81c20e55b5748fa6821d7803f8849af873c2ad22493e84bee684a4cc924c09dd"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.565068 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-d5fb75fdf-9bkv8" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon-log" containerID="cri-o://81c20e55b5748fa6821d7803f8849af873c2ad22493e84bee684a4cc924c09dd" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.566812 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-d5fb75fdf-9bkv8" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon" containerID="cri-o://178496e8e51f9fc6b23684c0babc353897234e15ac7624aaaaf2a3bba1211455" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.570179 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-c5756884f-lw5tc" podStartSLOduration=3.306404829 podStartE2EDuration="20.570155209s" podCreationTimestamp="2025-10-13 21:26:49 +0000 UTC" firstStartedPulling="2025-10-13 21:26:50.611951852 +0000 UTC m=+927.530196937" lastFinishedPulling="2025-10-13 21:27:07.875702232 +0000 UTC m=+944.793947317" observedRunningTime="2025-10-13 21:27:09.544944815 +0000 UTC m=+946.463189900" watchObservedRunningTime="2025-10-13 21:27:09.570155209 +0000 UTC m=+946.488400284" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.571506 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-w2fqn" podStartSLOduration=11.571498341 podStartE2EDuration="11.571498341s" podCreationTimestamp="2025-10-13 21:26:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:09.55913052 +0000 UTC m=+946.477375605" watchObservedRunningTime="2025-10-13 21:27:09.571498341 +0000 UTC m=+946.489743426" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.583453 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-56b8966ffb-99krc" podStartSLOduration=8.583439003 podStartE2EDuration="8.583439003s" podCreationTimestamp="2025-10-13 21:27:01 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:09.582117202 +0000 UTC m=+946.500362287" watchObservedRunningTime="2025-10-13 21:27:09.583439003 +0000 UTC m=+946.501684088" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.591243 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-776df869fc-xvpk7" event={"ID":"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f","Type":"ContainerStarted","Data":"7aafd053d88eb4bb981e2a34bd3dd8ef2e2addad3e5db9743260fdc7a92cc8aa"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.591652 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-776df869fc-xvpk7" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon-log" containerID="cri-o://e226370ef3d71605744db19b503788f9615bc782e87834c65b662671f1cc5f23" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.592104 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-776df869fc-xvpk7" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon" containerID="cri-o://7aafd053d88eb4bb981e2a34bd3dd8ef2e2addad3e5db9743260fdc7a92cc8aa" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.594386 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0a2c0f5-6c89-4f10-8eba-623190fd162b","Type":"ContainerStarted","Data":"2c2665bedfc49bbf4f93f338291d7a3f29cd9dc88a3e007e215203e698df298b"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.594703 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-log" containerID="cri-o://10026754ade7952a531a6c0d31017c5426935f0445c97a451ea36b5a28923c9d" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.594845 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-httpd" containerID="cri-o://2c2665bedfc49bbf4f93f338291d7a3f29cd9dc88a3e007e215203e698df298b" gracePeriod=30 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.596397 4689 generic.go:334] "Generic (PLEG): container finished" podID="8e0d19eb-6990-416e-b3cc-3f696c23146a" containerID="195163e77bd1d175263ea3301b4458341e38edf1533b768e26f9ffa0e1f05a5d" exitCode=0 Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.599249 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8dc8-account-create-h9cqt" event={"ID":"8e0d19eb-6990-416e-b3cc-3f696c23146a","Type":"ContainerDied","Data":"195163e77bd1d175263ea3301b4458341e38edf1533b768e26f9ffa0e1f05a5d"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.601514 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8dc8-account-create-h9cqt" event={"ID":"8e0d19eb-6990-416e-b3cc-3f696c23146a","Type":"ContainerStarted","Data":"51d06c179cf0af5bb3d87a28ccf215a3e539bfe7454994965bf7878bf305493d"} Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.636800 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c5756884f-lw5tc" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.638826 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/horizon-d5fb75fdf-9bkv8" podStartSLOduration=3.467879052 podStartE2EDuration="17.638806639s" podCreationTimestamp="2025-10-13 21:26:52 +0000 UTC" firstStartedPulling="2025-10-13 21:26:53.8080363 +0000 UTC m=+930.726281385" lastFinishedPulling="2025-10-13 21:27:07.978963887 +0000 UTC m=+944.897208972" observedRunningTime="2025-10-13 21:27:09.636521775 +0000 UTC m=+946.554766860" watchObservedRunningTime="2025-10-13 21:27:09.638806639 +0000 UTC m=+946.557051724" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.741863 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=19.741833397 podStartE2EDuration="19.741833397s" podCreationTimestamp="2025-10-13 21:26:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:09.671168382 +0000 UTC m=+946.589413467" watchObservedRunningTime="2025-10-13 21:27:09.741833397 +0000 UTC m=+946.660078492" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.763379 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-776df869fc-xvpk7" podStartSLOduration=3.798816879 podStartE2EDuration="20.763350444s" podCreationTimestamp="2025-10-13 21:26:49 +0000 UTC" firstStartedPulling="2025-10-13 21:26:50.796482443 +0000 UTC m=+927.714727528" lastFinishedPulling="2025-10-13 21:27:07.761016008 +0000 UTC m=+944.679261093" observedRunningTime="2025-10-13 21:27:09.696270273 +0000 UTC m=+946.614515368" watchObservedRunningTime="2025-10-13 21:27:09.763350444 +0000 UTC m=+946.681595529" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.884322 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" path="/var/lib/kubelet/pods/8172cb34-22e4-4710-a9b2-94cb60495469/volumes" Oct 13 21:27:09 crc kubenswrapper[4689]: I1013 21:27:09.993076 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-776df869fc-xvpk7" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.625256 4689 generic.go:334] "Generic (PLEG): container finished" podID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerID="7827568eda3987aac4577fbf9b2132240f997c39e8216e41845bf7355f8439a4" exitCode=0 Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.625747 4689 generic.go:334] "Generic (PLEG): container finished" podID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerID="a8f99b34a010cae954d35404b0fc0a17b3f3e7105ab77f4c1923383d7b38bb76" exitCode=143 Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.625439 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37236ceb-29ab-4e07-9e6f-6642dbed073c","Type":"ContainerDied","Data":"7827568eda3987aac4577fbf9b2132240f997c39e8216e41845bf7355f8439a4"} Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.625839 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37236ceb-29ab-4e07-9e6f-6642dbed073c","Type":"ContainerDied","Data":"a8f99b34a010cae954d35404b0fc0a17b3f3e7105ab77f4c1923383d7b38bb76"} Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.639142 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-795fd646bb-2s89l" event={"ID":"fd448066-1b70-4e35-959c-5c702d87560f","Type":"ContainerStarted","Data":"5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83"} Oct 13 
21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.654896 4689 generic.go:334] "Generic (PLEG): container finished" podID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerID="2c2665bedfc49bbf4f93f338291d7a3f29cd9dc88a3e007e215203e698df298b" exitCode=0 Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.654942 4689 generic.go:334] "Generic (PLEG): container finished" podID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerID="10026754ade7952a531a6c0d31017c5426935f0445c97a451ea36b5a28923c9d" exitCode=143 Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.655020 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0a2c0f5-6c89-4f10-8eba-623190fd162b","Type":"ContainerDied","Data":"2c2665bedfc49bbf4f93f338291d7a3f29cd9dc88a3e007e215203e698df298b"} Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.655534 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0a2c0f5-6c89-4f10-8eba-623190fd162b","Type":"ContainerDied","Data":"10026754ade7952a531a6c0d31017c5426935f0445c97a451ea36b5a28923c9d"} Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.655553 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e0a2c0f5-6c89-4f10-8eba-623190fd162b","Type":"ContainerDied","Data":"109408d8ea12ff35c1b5ca660d7fee17d569b6040e1eb1e19bee7c32b5e0ebc0"} Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.655573 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="109408d8ea12ff35c1b5ca660d7fee17d569b6040e1eb1e19bee7c32b5e0ebc0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.675024 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-795fd646bb-2s89l" podStartSLOduration=9.674991747 podStartE2EDuration="9.674991747s" podCreationTimestamp="2025-10-13 21:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:10.672070308 +0000 UTC m=+947.590315403" watchObservedRunningTime="2025-10-13 21:27:10.674991747 +0000 UTC m=+947.593236832" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.678149 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b8966ffb-99krc" event={"ID":"d46a395d-e4aa-45cb-85a7-86a43d5d7371","Type":"ContainerStarted","Data":"b348f03b3e248f1c749241be75748277913594c210882f8c69fcb73384f5e312"} Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.700879 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.821949 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-combined-ca-bundle\") pod \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.822007 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-config-data\") pod \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.822049 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-scripts\") pod \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.822117 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.822172 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8r2n\" (UniqueName: \"kubernetes.io/projected/e0a2c0f5-6c89-4f10-8eba-623190fd162b-kube-api-access-m8r2n\") pod \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.822200 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-httpd-run\") pod \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.822265 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-logs\") pod \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\" (UID: \"e0a2c0f5-6c89-4f10-8eba-623190fd162b\") " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.827807 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e0a2c0f5-6c89-4f10-8eba-623190fd162b" (UID: "e0a2c0f5-6c89-4f10-8eba-623190fd162b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.829819 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-logs" (OuterVolumeSpecName: "logs") pod "e0a2c0f5-6c89-4f10-8eba-623190fd162b" (UID: "e0a2c0f5-6c89-4f10-8eba-623190fd162b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.838250 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-scripts" (OuterVolumeSpecName: "scripts") pod "e0a2c0f5-6c89-4f10-8eba-623190fd162b" (UID: "e0a2c0f5-6c89-4f10-8eba-623190fd162b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.847551 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "e0a2c0f5-6c89-4f10-8eba-623190fd162b" (UID: "e0a2c0f5-6c89-4f10-8eba-623190fd162b"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.873918 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0a2c0f5-6c89-4f10-8eba-623190fd162b-kube-api-access-m8r2n" (OuterVolumeSpecName: "kube-api-access-m8r2n") pod "e0a2c0f5-6c89-4f10-8eba-623190fd162b" (UID: "e0a2c0f5-6c89-4f10-8eba-623190fd162b"). InnerVolumeSpecName "kube-api-access-m8r2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.881835 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0a2c0f5-6c89-4f10-8eba-623190fd162b" (UID: "e0a2c0f5-6c89-4f10-8eba-623190fd162b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.915178 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-config-data" (OuterVolumeSpecName: "config-data") pod "e0a2c0f5-6c89-4f10-8eba-623190fd162b" (UID: "e0a2c0f5-6c89-4f10-8eba-623190fd162b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.926602 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.926644 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.926662 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.926675 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0a2c0f5-6c89-4f10-8eba-623190fd162b-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.926703 4689 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.926716 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8r2n\" (UniqueName: \"kubernetes.io/projected/e0a2c0f5-6c89-4f10-8eba-623190fd162b-kube-api-access-m8r2n\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.926729 4689 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e0a2c0f5-6c89-4f10-8eba-623190fd162b-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:10.951074 4689 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.028579 4689 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.630512 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.630558 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.684999 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.715555 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.725896 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.732857 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.732896 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-56b8966ffb-99krc" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.742684 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:27:11 crc kubenswrapper[4689]: E1013 21:27:11.743118 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="dnsmasq-dns" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.743133 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="dnsmasq-dns" Oct 13 21:27:11 crc kubenswrapper[4689]: E1013 21:27:11.743143 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-log" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.743150 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-log" Oct 13 21:27:11 crc kubenswrapper[4689]: E1013 21:27:11.743176 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-httpd" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.743183 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-httpd" Oct 13 21:27:11 crc kubenswrapper[4689]: E1013 21:27:11.743214 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="init" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.743220 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="init" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.743388 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-log" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.743413 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" containerName="glance-httpd" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.743420 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8172cb34-22e4-4710-a9b2-94cb60495469" containerName="dnsmasq-dns" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.744350 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.747106 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.747210 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.768770 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.843504 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-config-data\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.843559 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-scripts\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.843664 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.843701 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-logs\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.843899 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dp8d\" (UniqueName: \"kubernetes.io/projected/c0bc4829-b428-4d71-aeea-719aa026dcc0-kube-api-access-6dp8d\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.843933 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.844022 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.844055 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.881034 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0a2c0f5-6c89-4f10-8eba-623190fd162b" path="/var/lib/kubelet/pods/e0a2c0f5-6c89-4f10-8eba-623190fd162b/volumes" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945380 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945694 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dp8d\" (UniqueName: \"kubernetes.io/projected/c0bc4829-b428-4d71-aeea-719aa026dcc0-kube-api-access-6dp8d\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945744 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945770 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945828 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-config-data\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945869 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-scripts\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945925 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945971 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-logs\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " 
pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.945665 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.949087 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-logs\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.949304 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.959175 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.959722 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-scripts\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.961803 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-config-data\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.964858 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:11 crc kubenswrapper[4689]: I1013 21:27:11.997502 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dp8d\" (UniqueName: \"kubernetes.io/projected/c0bc4829-b428-4d71-aeea-719aa026dcc0-kube-api-access-6dp8d\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.016504 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " pod="openstack/glance-default-external-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.072136 4689 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.360923 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.391692 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-93c8-account-create-qzmql" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.402649 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2740-account-create-cjdts" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.415421 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8dc8-account-create-h9cqt" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464273 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k47sp\" (UniqueName: \"kubernetes.io/projected/8e0d19eb-6990-416e-b3cc-3f696c23146a-kube-api-access-k47sp\") pod \"8e0d19eb-6990-416e-b3cc-3f696c23146a\" (UID: \"8e0d19eb-6990-416e-b3cc-3f696c23146a\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464342 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-config-data\") pod \"37236ceb-29ab-4e07-9e6f-6642dbed073c\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464380 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpp2l\" (UniqueName: \"kubernetes.io/projected/06e1fd87-93da-41a7-8e6d-6477bcaa7bec-kube-api-access-kpp2l\") pod \"06e1fd87-93da-41a7-8e6d-6477bcaa7bec\" (UID: \"06e1fd87-93da-41a7-8e6d-6477bcaa7bec\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464416 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-httpd-run\") pod \"37236ceb-29ab-4e07-9e6f-6642dbed073c\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464441 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-scripts\") pod \"37236ceb-29ab-4e07-9e6f-6642dbed073c\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464483 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bzcl\" (UniqueName: \"kubernetes.io/projected/25e7989d-06bf-4909-848c-92a353d7be6f-kube-api-access-5bzcl\") pod \"25e7989d-06bf-4909-848c-92a353d7be6f\" (UID: \"25e7989d-06bf-4909-848c-92a353d7be6f\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464520 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-combined-ca-bundle\") pod \"37236ceb-29ab-4e07-9e6f-6642dbed073c\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464562 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-logs\") pod \"37236ceb-29ab-4e07-9e6f-6642dbed073c\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464624 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"37236ceb-29ab-4e07-9e6f-6642dbed073c\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.464718 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mw4t2\" (UniqueName: \"kubernetes.io/projected/37236ceb-29ab-4e07-9e6f-6642dbed073c-kube-api-access-mw4t2\") pod \"37236ceb-29ab-4e07-9e6f-6642dbed073c\" (UID: \"37236ceb-29ab-4e07-9e6f-6642dbed073c\") " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.465411 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "37236ceb-29ab-4e07-9e6f-6642dbed073c" (UID: "37236ceb-29ab-4e07-9e6f-6642dbed073c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.472444 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37236ceb-29ab-4e07-9e6f-6642dbed073c-kube-api-access-mw4t2" (OuterVolumeSpecName: "kube-api-access-mw4t2") pod "37236ceb-29ab-4e07-9e6f-6642dbed073c" (UID: "37236ceb-29ab-4e07-9e6f-6642dbed073c"). InnerVolumeSpecName "kube-api-access-mw4t2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.472736 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-logs" (OuterVolumeSpecName: "logs") pod "37236ceb-29ab-4e07-9e6f-6642dbed073c" (UID: "37236ceb-29ab-4e07-9e6f-6642dbed073c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.473479 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e0d19eb-6990-416e-b3cc-3f696c23146a-kube-api-access-k47sp" (OuterVolumeSpecName: "kube-api-access-k47sp") pod "8e0d19eb-6990-416e-b3cc-3f696c23146a" (UID: "8e0d19eb-6990-416e-b3cc-3f696c23146a"). InnerVolumeSpecName "kube-api-access-k47sp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.499391 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06e1fd87-93da-41a7-8e6d-6477bcaa7bec-kube-api-access-kpp2l" (OuterVolumeSpecName: "kube-api-access-kpp2l") pod "06e1fd87-93da-41a7-8e6d-6477bcaa7bec" (UID: "06e1fd87-93da-41a7-8e6d-6477bcaa7bec"). InnerVolumeSpecName "kube-api-access-kpp2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.499482 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-scripts" (OuterVolumeSpecName: "scripts") pod "37236ceb-29ab-4e07-9e6f-6642dbed073c" (UID: "37236ceb-29ab-4e07-9e6f-6642dbed073c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.502754 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "37236ceb-29ab-4e07-9e6f-6642dbed073c" (UID: "37236ceb-29ab-4e07-9e6f-6642dbed073c"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.509647 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e7989d-06bf-4909-848c-92a353d7be6f-kube-api-access-5bzcl" (OuterVolumeSpecName: "kube-api-access-5bzcl") pod "25e7989d-06bf-4909-848c-92a353d7be6f" (UID: "25e7989d-06bf-4909-848c-92a353d7be6f"). InnerVolumeSpecName "kube-api-access-5bzcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.539752 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37236ceb-29ab-4e07-9e6f-6642dbed073c" (UID: "37236ceb-29ab-4e07-9e6f-6642dbed073c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566369 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpp2l\" (UniqueName: \"kubernetes.io/projected/06e1fd87-93da-41a7-8e6d-6477bcaa7bec-kube-api-access-kpp2l\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566400 4689 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566412 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566422 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bzcl\" (UniqueName: \"kubernetes.io/projected/25e7989d-06bf-4909-848c-92a353d7be6f-kube-api-access-5bzcl\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566430 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566439 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37236ceb-29ab-4e07-9e6f-6642dbed073c-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566461 4689 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.566470 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mw4t2\" (UniqueName: \"kubernetes.io/projected/37236ceb-29ab-4e07-9e6f-6642dbed073c-kube-api-access-mw4t2\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 
21:27:12.566479 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k47sp\" (UniqueName: \"kubernetes.io/projected/8e0d19eb-6990-416e-b3cc-3f696c23146a-kube-api-access-k47sp\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.586427 4689 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.599471 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-config-data" (OuterVolumeSpecName: "config-data") pod "37236ceb-29ab-4e07-9e6f-6642dbed073c" (UID: "37236ceb-29ab-4e07-9e6f-6642dbed073c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.668242 4689 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.668291 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37236ceb-29ab-4e07-9e6f-6642dbed073c-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.700010 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-93c8-account-create-qzmql" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.699990 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-93c8-account-create-qzmql" event={"ID":"06e1fd87-93da-41a7-8e6d-6477bcaa7bec","Type":"ContainerDied","Data":"95d05e4cfc19ebd04e74c61527b0fca57ae56a4f19bf7b56ee27418ea8f10815"} Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.700090 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95d05e4cfc19ebd04e74c61527b0fca57ae56a4f19bf7b56ee27418ea8f10815" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.702301 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2740-account-create-cjdts" event={"ID":"25e7989d-06bf-4909-848c-92a353d7be6f","Type":"ContainerDied","Data":"e7335f48913199b8d7c6340fba342e4920063d0f824c3ae3602a8557e19b4ab8"} Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.702339 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7335f48913199b8d7c6340fba342e4920063d0f824c3ae3602a8557e19b4ab8" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.702450 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2740-account-create-cjdts" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.705835 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37236ceb-29ab-4e07-9e6f-6642dbed073c","Type":"ContainerDied","Data":"6718872e6d207f762ee8a1638054495ec2f1e3b084d2d1a8fe05fbba6936d3d7"} Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.705895 4689 scope.go:117] "RemoveContainer" containerID="7827568eda3987aac4577fbf9b2132240f997c39e8216e41845bf7355f8439a4" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.706044 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.720283 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8dc8-account-create-h9cqt" event={"ID":"8e0d19eb-6990-416e-b3cc-3f696c23146a","Type":"ContainerDied","Data":"51d06c179cf0af5bb3d87a28ccf215a3e539bfe7454994965bf7878bf305493d"} Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.720322 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51d06c179cf0af5bb3d87a28ccf215a3e539bfe7454994965bf7878bf305493d" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.720420 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8dc8-account-create-h9cqt" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.729880 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerStarted","Data":"0dd8099122cbe47ada37da4eaf5c5ccdf760a7dc9073d5934f7e647ab2009cbd"} Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.759068 4689 scope.go:117] "RemoveContainer" containerID="a8f99b34a010cae954d35404b0fc0a17b3f3e7105ab77f4c1923383d7b38bb76" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.761835 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.775473 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.787968 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:27:12 crc kubenswrapper[4689]: E1013 21:27:12.788491 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-httpd" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.788516 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-httpd" Oct 13 21:27:12 crc kubenswrapper[4689]: E1013 21:27:12.788532 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06e1fd87-93da-41a7-8e6d-6477bcaa7bec" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.788541 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="06e1fd87-93da-41a7-8e6d-6477bcaa7bec" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: E1013 21:27:12.788574 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e0d19eb-6990-416e-b3cc-3f696c23146a" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792541 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e0d19eb-6990-416e-b3cc-3f696c23146a" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: E1013 21:27:12.792580 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e7989d-06bf-4909-848c-92a353d7be6f" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792604 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e7989d-06bf-4909-848c-92a353d7be6f" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: E1013 21:27:12.792628 4689 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-log" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792635 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-log" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792894 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="06e1fd87-93da-41a7-8e6d-6477bcaa7bec" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792917 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e7989d-06bf-4909-848c-92a353d7be6f" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792931 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e0d19eb-6990-416e-b3cc-3f696c23146a" containerName="mariadb-account-create" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792941 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-httpd" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.792952 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" containerName="glance-log" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.794206 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.799939 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.800823 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.806063 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871406 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-config-data\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871475 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871500 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871534 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f52lf\" (UniqueName: \"kubernetes.io/projected/519df139-a232-4218-a9ba-4d626fe3d115-kube-api-access-f52lf\") pod \"glance-default-internal-api-0\" 
(UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871639 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871701 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871729 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-logs\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.871774 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-scripts\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.919344 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:27:12 crc kubenswrapper[4689]: W1013 21:27:12.936295 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0bc4829_b428_4d71_aeea_719aa026dcc0.slice/crio-f2c5d0e526947325f049d9d9a74d5967cf6120ae68cc7db0edbf0efc4ed2241b WatchSource:0}: Error finding container f2c5d0e526947325f049d9d9a74d5967cf6120ae68cc7db0edbf0efc4ed2241b: Status 404 returned error can't find the container with id f2c5d0e526947325f049d9d9a74d5967cf6120ae68cc7db0edbf0efc4ed2241b Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.973715 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.973771 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-logs\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.973826 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-scripts\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.973877 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-config-data\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.973918 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.973939 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.973971 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f52lf\" (UniqueName: \"kubernetes.io/projected/519df139-a232-4218-a9ba-4d626fe3d115-kube-api-access-f52lf\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.974001 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.974155 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.974435 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.974822 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-logs\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.983801 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.987189 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-config-data\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.987266 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.995342 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-scripts\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:12 crc kubenswrapper[4689]: I1013 21:27:12.998316 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f52lf\" (UniqueName: \"kubernetes.io/projected/519df139-a232-4218-a9ba-4d626fe3d115-kube-api-access-f52lf\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.031826 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.117030 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.144864 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-d5fb75fdf-9bkv8" Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.738898 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.752880 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0bc4829-b428-4d71-aeea-719aa026dcc0","Type":"ContainerStarted","Data":"4a838a606986f7a7b6fbde47b397b75c844d3997c4c66c8fb463b4c7075d80e1"} Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.752947 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0bc4829-b428-4d71-aeea-719aa026dcc0","Type":"ContainerStarted","Data":"f2c5d0e526947325f049d9d9a74d5967cf6120ae68cc7db0edbf0efc4ed2241b"} Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.755348 4689 generic.go:334] "Generic (PLEG): container finished" podID="73cfa0da-2c68-445e-9d41-c2cee9857776" containerID="71c8ac33010c29d283dec138712b33674ff75ae4aa042d2ed76733062146f7f9" exitCode=0 Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.756286 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2fqn" event={"ID":"73cfa0da-2c68-445e-9d41-c2cee9857776","Type":"ContainerDied","Data":"71c8ac33010c29d283dec138712b33674ff75ae4aa042d2ed76733062146f7f9"} Oct 13 21:27:13 crc kubenswrapper[4689]: I1013 21:27:13.897110 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37236ceb-29ab-4e07-9e6f-6642dbed073c" path="/var/lib/kubelet/pods/37236ceb-29ab-4e07-9e6f-6642dbed073c/volumes" Oct 13 21:27:14 crc kubenswrapper[4689]: I1013 21:27:14.778863 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"519df139-a232-4218-a9ba-4d626fe3d115","Type":"ContainerStarted","Data":"0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35"} Oct 13 21:27:14 crc kubenswrapper[4689]: I1013 21:27:14.779345 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"519df139-a232-4218-a9ba-4d626fe3d115","Type":"ContainerStarted","Data":"308eda75dd02282ed98a1a55922058e16de09748eb978c9f067acb3b2cd19443"} Oct 13 21:27:14 crc kubenswrapper[4689]: I1013 21:27:14.790747 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0bc4829-b428-4d71-aeea-719aa026dcc0","Type":"ContainerStarted","Data":"1298847ffef436f01cca433afe39c7766f3cd21da136c5e98d34689a544c66d9"} Oct 13 21:27:14 crc kubenswrapper[4689]: I1013 21:27:14.820720 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.820704105 podStartE2EDuration="3.820704105s" podCreationTimestamp="2025-10-13 21:27:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:14.819917316 +0000 UTC m=+951.738162401" watchObservedRunningTime="2025-10-13 21:27:14.820704105 +0000 UTC m=+951.738949190" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.250219 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.362977 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-credential-keys\") pod \"73cfa0da-2c68-445e-9d41-c2cee9857776\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.363099 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmf2g\" (UniqueName: \"kubernetes.io/projected/73cfa0da-2c68-445e-9d41-c2cee9857776-kube-api-access-qmf2g\") pod \"73cfa0da-2c68-445e-9d41-c2cee9857776\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.363148 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-scripts\") pod \"73cfa0da-2c68-445e-9d41-c2cee9857776\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.363166 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-fernet-keys\") pod \"73cfa0da-2c68-445e-9d41-c2cee9857776\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.363193 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-config-data\") pod \"73cfa0da-2c68-445e-9d41-c2cee9857776\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.363226 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-combined-ca-bundle\") pod \"73cfa0da-2c68-445e-9d41-c2cee9857776\" (UID: \"73cfa0da-2c68-445e-9d41-c2cee9857776\") " Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.371259 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-scripts" (OuterVolumeSpecName: "scripts") pod "73cfa0da-2c68-445e-9d41-c2cee9857776" (UID: "73cfa0da-2c68-445e-9d41-c2cee9857776"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.378454 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "73cfa0da-2c68-445e-9d41-c2cee9857776" (UID: "73cfa0da-2c68-445e-9d41-c2cee9857776"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.379828 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73cfa0da-2c68-445e-9d41-c2cee9857776-kube-api-access-qmf2g" (OuterVolumeSpecName: "kube-api-access-qmf2g") pod "73cfa0da-2c68-445e-9d41-c2cee9857776" (UID: "73cfa0da-2c68-445e-9d41-c2cee9857776"). InnerVolumeSpecName "kube-api-access-qmf2g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.389251 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "73cfa0da-2c68-445e-9d41-c2cee9857776" (UID: "73cfa0da-2c68-445e-9d41-c2cee9857776"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.406827 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73cfa0da-2c68-445e-9d41-c2cee9857776" (UID: "73cfa0da-2c68-445e-9d41-c2cee9857776"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.425781 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-config-data" (OuterVolumeSpecName: "config-data") pod "73cfa0da-2c68-445e-9d41-c2cee9857776" (UID: "73cfa0da-2c68-445e-9d41-c2cee9857776"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.465665 4689 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.465700 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmf2g\" (UniqueName: \"kubernetes.io/projected/73cfa0da-2c68-445e-9d41-c2cee9857776-kube-api-access-qmf2g\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.465718 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.465728 4689 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.465737 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.465745 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73cfa0da-2c68-445e-9d41-c2cee9857776-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.819926 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-w2fqn" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.819911 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2fqn" event={"ID":"73cfa0da-2c68-445e-9d41-c2cee9857776","Type":"ContainerDied","Data":"369846f7340beaa0ec4f7cf04fdea4d8321b149c49342b0f29a9dd8c7192778e"} Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.821222 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="369846f7340beaa0ec4f7cf04fdea4d8321b149c49342b0f29a9dd8c7192778e" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.933561 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6f78847c76-48zjm"] Oct 13 21:27:15 crc kubenswrapper[4689]: E1013 21:27:15.933969 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73cfa0da-2c68-445e-9d41-c2cee9857776" containerName="keystone-bootstrap" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.933992 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="73cfa0da-2c68-445e-9d41-c2cee9857776" containerName="keystone-bootstrap" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.934324 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="73cfa0da-2c68-445e-9d41-c2cee9857776" containerName="keystone-bootstrap" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.942955 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6f78847c76-48zjm"] Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.943079 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.947973 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.948556 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.949262 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.949441 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.949630 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 13 21:27:15 crc kubenswrapper[4689]: I1013 21:27:15.949798 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dqmvt" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.083250 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-fernet-keys\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.083337 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-combined-ca-bundle\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 
21:27:16.083534 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-internal-tls-certs\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.083815 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-scripts\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.083982 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-public-tls-certs\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.084056 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-config-data\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.084160 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-credential-keys\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.084231 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x2lm\" (UniqueName: \"kubernetes.io/projected/bf43c63a-d1f7-492b-9345-d271dd62a7d2-kube-api-access-7x2lm\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.186077 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-public-tls-certs\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.186138 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-config-data\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.186180 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-credential-keys\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 
21:27:16.186211 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x2lm\" (UniqueName: \"kubernetes.io/projected/bf43c63a-d1f7-492b-9345-d271dd62a7d2-kube-api-access-7x2lm\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.186279 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-fernet-keys\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.186457 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-combined-ca-bundle\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.186513 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-internal-tls-certs\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.186550 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-scripts\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.191898 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-scripts\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.192497 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-credential-keys\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.193399 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-config-data\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.193991 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-internal-tls-certs\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.195537 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-fernet-keys\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.203681 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-public-tls-certs\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.209195 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x2lm\" (UniqueName: \"kubernetes.io/projected/bf43c63a-d1f7-492b-9345-d271dd62a7d2-kube-api-access-7x2lm\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.214352 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf43c63a-d1f7-492b-9345-d271dd62a7d2-combined-ca-bundle\") pod \"keystone-6f78847c76-48zjm\" (UID: \"bf43c63a-d1f7-492b-9345-d271dd62a7d2\") " pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.280381 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.863074 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"519df139-a232-4218-a9ba-4d626fe3d115","Type":"ContainerStarted","Data":"1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5"} Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.880825 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6f78847c76-48zjm"] Oct 13 21:27:16 crc kubenswrapper[4689]: I1013 21:27:16.916940 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.916921364 podStartE2EDuration="4.916921364s" podCreationTimestamp="2025-10-13 21:27:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:16.915829648 +0000 UTC m=+953.834074743" watchObservedRunningTime="2025-10-13 21:27:16.916921364 +0000 UTC m=+953.835166449" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.204483 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-xsdlt"] Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.205773 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.211416 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-fcz4s" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.211493 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.211731 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.215662 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-xsdlt"] Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.329822 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-config-data\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.330381 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-db-sync-config-data\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.335757 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch4rt\" (UniqueName: \"kubernetes.io/projected/2c5019e8-86d0-4324-bffc-70583983b377-kube-api-access-ch4rt\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.335962 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c5019e8-86d0-4324-bffc-70583983b377-etc-machine-id\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.336009 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-scripts\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.336209 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-combined-ca-bundle\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.413667 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-8lmgj"] Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.414876 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.416431 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-8lmgj"] Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.424560 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.432210 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7gbvh" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.437841 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-combined-ca-bundle\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.437908 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch4rt\" (UniqueName: \"kubernetes.io/projected/2c5019e8-86d0-4324-bffc-70583983b377-kube-api-access-ch4rt\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.437931 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6bx6\" (UniqueName: \"kubernetes.io/projected/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-kube-api-access-t6bx6\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.437957 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c5019e8-86d0-4324-bffc-70583983b377-etc-machine-id\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.437977 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-scripts\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.438014 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-combined-ca-bundle\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.438030 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-db-sync-config-data\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.438094 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-config-data\") pod \"cinder-db-sync-xsdlt\" 
(UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.438114 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-db-sync-config-data\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.448576 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c5019e8-86d0-4324-bffc-70583983b377-etc-machine-id\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.448935 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-db-sync-config-data\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.454691 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-config-data\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.455028 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-scripts\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.460384 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-combined-ca-bundle\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.474348 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch4rt\" (UniqueName: \"kubernetes.io/projected/2c5019e8-86d0-4324-bffc-70583983b377-kube-api-access-ch4rt\") pod \"cinder-db-sync-xsdlt\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.512428 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-d68g8"] Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.515443 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.525480 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.525933 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5jvkq" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.526192 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.535713 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.540076 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-combined-ca-bundle\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.540149 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2s4q\" (UniqueName: \"kubernetes.io/projected/632487e3-a6f0-4e53-bb4e-33454874ddca-kube-api-access-s2s4q\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.540183 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6bx6\" (UniqueName: \"kubernetes.io/projected/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-kube-api-access-t6bx6\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.540220 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-combined-ca-bundle\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.540255 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-db-sync-config-data\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.540293 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-config\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.546168 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-combined-ca-bundle\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.550188 4689 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/neutron-db-sync-d68g8"] Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.555156 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-db-sync-config-data\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.570488 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6bx6\" (UniqueName: \"kubernetes.io/projected/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-kube-api-access-t6bx6\") pod \"barbican-db-sync-8lmgj\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.643333 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2s4q\" (UniqueName: \"kubernetes.io/projected/632487e3-a6f0-4e53-bb4e-33454874ddca-kube-api-access-s2s4q\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.643727 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-combined-ca-bundle\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.643845 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-config\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.649782 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-config\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.650659 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-combined-ca-bundle\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.675884 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2s4q\" (UniqueName: \"kubernetes.io/projected/632487e3-a6f0-4e53-bb4e-33454874ddca-kube-api-access-s2s4q\") pod \"neutron-db-sync-d68g8\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") " pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.722511 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-d68g8" Oct 13 21:27:17 crc kubenswrapper[4689]: I1013 21:27:17.764960 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:21 crc kubenswrapper[4689]: I1013 21:27:21.641410 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-795fd646bb-2s89l" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 13 21:27:21 crc kubenswrapper[4689]: I1013 21:27:21.734866 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-56b8966ffb-99krc" podUID="d46a395d-e4aa-45cb-85a7-86a43d5d7371" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Oct 13 21:27:21 crc kubenswrapper[4689]: I1013 21:27:21.948027 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6f78847c76-48zjm" event={"ID":"bf43c63a-d1f7-492b-9345-d271dd62a7d2","Type":"ContainerStarted","Data":"19699881efcc23769fb2f5fe85797cf2b427a0977f4f0b70b064689ae880516c"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.073438 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.073962 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.152641 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.177664 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.308737 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-xsdlt"] Oct 13 21:27:22 crc kubenswrapper[4689]: W1013 21:27:22.318365 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c5019e8_86d0_4324_bffc_70583983b377.slice/crio-3c9e77c5dad8f4500a3b9dbc8c961dbe48f5b6cbc8b860f40dadd13c2893471c WatchSource:0}: Error finding container 3c9e77c5dad8f4500a3b9dbc8c961dbe48f5b6cbc8b860f40dadd13c2893471c: Status 404 returned error can't find the container with id 3c9e77c5dad8f4500a3b9dbc8c961dbe48f5b6cbc8b860f40dadd13c2893471c Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.415492 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-d68g8"] Oct 13 21:27:22 crc kubenswrapper[4689]: W1013 21:27:22.419260 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod632487e3_a6f0_4e53_bb4e_33454874ddca.slice/crio-26121bf21bf021cd1a0039e0269ac2cff885e760cc3d1a7cf1b1eca1b6335215 WatchSource:0}: Error finding container 26121bf21bf021cd1a0039e0269ac2cff885e760cc3d1a7cf1b1eca1b6335215: Status 404 returned error can't find the container with id 26121bf21bf021cd1a0039e0269ac2cff885e760cc3d1a7cf1b1eca1b6335215 Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.580399 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-8lmgj"] Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.969781 4689 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/cinder-db-sync-xsdlt" event={"ID":"2c5019e8-86d0-4324-bffc-70583983b377","Type":"ContainerStarted","Data":"3c9e77c5dad8f4500a3b9dbc8c961dbe48f5b6cbc8b860f40dadd13c2893471c"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.971775 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4cb78" event={"ID":"7a541987-5ad6-4f2f-b625-5b8105b669e5","Type":"ContainerStarted","Data":"522ac4a1ac29734f14e61989d38b83d6927cac1cba68ee5921cc1c2e4bd4d59f"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.979384 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerStarted","Data":"4444c7113b201137a9d7543efe4c710bd829594e77e68b6b7c121d0865e0823a"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.981942 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-d68g8" event={"ID":"632487e3-a6f0-4e53-bb4e-33454874ddca","Type":"ContainerStarted","Data":"f6efecc3b0c1dffd9253a9bf10c4856e50d1a86e84e418e6d1bc3aa2a25690cb"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.981979 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-d68g8" event={"ID":"632487e3-a6f0-4e53-bb4e-33454874ddca","Type":"ContainerStarted","Data":"26121bf21bf021cd1a0039e0269ac2cff885e760cc3d1a7cf1b1eca1b6335215"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.984743 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8lmgj" event={"ID":"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579","Type":"ContainerStarted","Data":"e603df4d22ae08ce0f30e8e821b2bd71381d745ab78bda7bc81f0f66d9f9e6a6"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.994029 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6f78847c76-48zjm" event={"ID":"bf43c63a-d1f7-492b-9345-d271dd62a7d2","Type":"ContainerStarted","Data":"54d0fc54ab1bfed1fd87531d7f06b07d07bcd2e97a157f5c9caee5a4a637a64b"} Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.995143 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.995264 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 13 21:27:22 crc kubenswrapper[4689]: I1013 21:27:22.995890 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:23 crc kubenswrapper[4689]: I1013 21:27:23.004643 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-4cb78" podStartSLOduration=3.236556425 podStartE2EDuration="34.004574064s" podCreationTimestamp="2025-10-13 21:26:49 +0000 UTC" firstStartedPulling="2025-10-13 21:26:51.148192245 +0000 UTC m=+928.066437330" lastFinishedPulling="2025-10-13 21:27:21.916209884 +0000 UTC m=+958.834454969" observedRunningTime="2025-10-13 21:27:22.98876287 +0000 UTC m=+959.907007955" watchObservedRunningTime="2025-10-13 21:27:23.004574064 +0000 UTC m=+959.922819159" Oct 13 21:27:23 crc kubenswrapper[4689]: I1013 21:27:23.006990 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-d68g8" podStartSLOduration=6.006963389 podStartE2EDuration="6.006963389s" podCreationTimestamp="2025-10-13 21:27:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:23.006510669 +0000 UTC m=+959.924755754" watchObservedRunningTime="2025-10-13 21:27:23.006963389 +0000 UTC m=+959.925208474" Oct 13 21:27:23 crc kubenswrapper[4689]: I1013 21:27:23.040874 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6f78847c76-48zjm" podStartSLOduration=8.040850738 podStartE2EDuration="8.040850738s" podCreationTimestamp="2025-10-13 21:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:23.033697099 +0000 UTC m=+959.951942184" watchObservedRunningTime="2025-10-13 21:27:23.040850738 +0000 UTC m=+959.959095823" Oct 13 21:27:23 crc kubenswrapper[4689]: I1013 21:27:23.118341 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:23 crc kubenswrapper[4689]: I1013 21:27:23.118416 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:23 crc kubenswrapper[4689]: I1013 21:27:23.179935 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:23 crc kubenswrapper[4689]: I1013 21:27:23.193443 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:24 crc kubenswrapper[4689]: I1013 21:27:24.021460 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:24 crc kubenswrapper[4689]: I1013 21:27:24.021526 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:25 crc kubenswrapper[4689]: I1013 21:27:25.041644 4689 generic.go:334] "Generic (PLEG): container finished" podID="7a541987-5ad6-4f2f-b625-5b8105b669e5" containerID="522ac4a1ac29734f14e61989d38b83d6927cac1cba68ee5921cc1c2e4bd4d59f" exitCode=0 Oct 13 21:27:25 crc kubenswrapper[4689]: I1013 21:27:25.041787 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:27:25 crc kubenswrapper[4689]: I1013 21:27:25.041766 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4cb78" event={"ID":"7a541987-5ad6-4f2f-b625-5b8105b669e5","Type":"ContainerDied","Data":"522ac4a1ac29734f14e61989d38b83d6927cac1cba68ee5921cc1c2e4bd4d59f"} Oct 13 21:27:25 crc kubenswrapper[4689]: I1013 21:27:25.041811 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:27:25 crc kubenswrapper[4689]: I1013 21:27:25.295226 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 13 21:27:25 crc kubenswrapper[4689]: I1013 21:27:25.301183 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 13 21:27:26 crc kubenswrapper[4689]: I1013 21:27:26.056860 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:27:26 crc kubenswrapper[4689]: I1013 21:27:26.056903 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:27:26 crc kubenswrapper[4689]: I1013 21:27:26.673681 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" 
Oct 13 21:27:26 crc kubenswrapper[4689]: I1013 21:27:26.686663 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.443942 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-4cb78" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.574051 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq48s\" (UniqueName: \"kubernetes.io/projected/7a541987-5ad6-4f2f-b625-5b8105b669e5-kube-api-access-cq48s\") pod \"7a541987-5ad6-4f2f-b625-5b8105b669e5\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.575192 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a541987-5ad6-4f2f-b625-5b8105b669e5-logs\") pod \"7a541987-5ad6-4f2f-b625-5b8105b669e5\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.575268 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-combined-ca-bundle\") pod \"7a541987-5ad6-4f2f-b625-5b8105b669e5\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.575348 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-scripts\") pod \"7a541987-5ad6-4f2f-b625-5b8105b669e5\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.575475 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-config-data\") pod \"7a541987-5ad6-4f2f-b625-5b8105b669e5\" (UID: \"7a541987-5ad6-4f2f-b625-5b8105b669e5\") " Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.575653 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a541987-5ad6-4f2f-b625-5b8105b669e5-logs" (OuterVolumeSpecName: "logs") pod "7a541987-5ad6-4f2f-b625-5b8105b669e5" (UID: "7a541987-5ad6-4f2f-b625-5b8105b669e5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.575893 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a541987-5ad6-4f2f-b625-5b8105b669e5-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.583878 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a541987-5ad6-4f2f-b625-5b8105b669e5-kube-api-access-cq48s" (OuterVolumeSpecName: "kube-api-access-cq48s") pod "7a541987-5ad6-4f2f-b625-5b8105b669e5" (UID: "7a541987-5ad6-4f2f-b625-5b8105b669e5"). InnerVolumeSpecName "kube-api-access-cq48s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.611374 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-scripts" (OuterVolumeSpecName: "scripts") pod "7a541987-5ad6-4f2f-b625-5b8105b669e5" (UID: "7a541987-5ad6-4f2f-b625-5b8105b669e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.613450 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a541987-5ad6-4f2f-b625-5b8105b669e5" (UID: "7a541987-5ad6-4f2f-b625-5b8105b669e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.617146 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-config-data" (OuterVolumeSpecName: "config-data") pod "7a541987-5ad6-4f2f-b625-5b8105b669e5" (UID: "7a541987-5ad6-4f2f-b625-5b8105b669e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.677270 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.677311 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.677323 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a541987-5ad6-4f2f-b625-5b8105b669e5-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:29 crc kubenswrapper[4689]: I1013 21:27:29.677336 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq48s\" (UniqueName: \"kubernetes.io/projected/7a541987-5ad6-4f2f-b625-5b8105b669e5-kube-api-access-cq48s\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.106032 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-4cb78" event={"ID":"7a541987-5ad6-4f2f-b625-5b8105b669e5","Type":"ContainerDied","Data":"376b6793ff24e324dbb76b37873e298a6384bbead4cda6687e04d756ec286310"} Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.106571 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="376b6793ff24e324dbb76b37873e298a6384bbead4cda6687e04d756ec286310" Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.106154 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-4cb78"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.550881 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7d89fff484-q9fvk"]
Oct 13 21:27:30 crc kubenswrapper[4689]: E1013 21:27:30.552253 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a541987-5ad6-4f2f-b625-5b8105b669e5" containerName="placement-db-sync"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.552324 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a541987-5ad6-4f2f-b625-5b8105b669e5" containerName="placement-db-sync"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.552563 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a541987-5ad6-4f2f-b625-5b8105b669e5" containerName="placement-db-sync"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.553641 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.556037 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.556360 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.556499 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.556837 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.567150 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-bxgvc"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.569579 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d89fff484-q9fvk"]
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.602125 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-scripts\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.602170 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-logs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.602210 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-combined-ca-bundle\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.602251 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-config-data\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.602281 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-internal-tls-certs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.602341 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjb6z\" (UniqueName: \"kubernetes.io/projected/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-kube-api-access-tjb6z\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.602389 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-public-tls-certs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.703877 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-combined-ca-bundle\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.704706 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-config-data\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.704862 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-internal-tls-certs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.705028 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjb6z\" (UniqueName: \"kubernetes.io/projected/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-kube-api-access-tjb6z\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.705178 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-public-tls-certs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.705285 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-scripts\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.705384 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-logs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.706270 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-logs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.711459 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-config-data\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.715948 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-scripts\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.718209 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-combined-ca-bundle\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.736051 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjb6z\" (UniqueName: \"kubernetes.io/projected/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-kube-api-access-tjb6z\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.739057 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-internal-tls-certs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.739471 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5-public-tls-certs\") pod \"placement-7d89fff484-q9fvk\" (UID: \"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5\") " pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:30 crc kubenswrapper[4689]: I1013 21:27:30.878085 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d89fff484-q9fvk"
Oct 13 21:27:31 crc kubenswrapper[4689]: I1013 21:27:31.631006 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-795fd646bb-2s89l" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused"
Oct 13 21:27:31 crc kubenswrapper[4689]: I1013 21:27:31.732710 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-56b8966ffb-99krc" podUID="d46a395d-e4aa-45cb-85a7-86a43d5d7371" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused"
Oct 13 21:27:37 crc kubenswrapper[4689]: I1013 21:27:37.173347 4689 generic.go:334] "Generic (PLEG): container finished" podID="632487e3-a6f0-4e53-bb4e-33454874ddca" containerID="f6efecc3b0c1dffd9253a9bf10c4856e50d1a86e84e418e6d1bc3aa2a25690cb" exitCode=0
Oct 13 21:27:37 crc kubenswrapper[4689]: I1013 21:27:37.173420 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-d68g8" event={"ID":"632487e3-a6f0-4e53-bb4e-33454874ddca","Type":"ContainerDied","Data":"f6efecc3b0c1dffd9253a9bf10c4856e50d1a86e84e418e6d1bc3aa2a25690cb"}
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.198455 4689 generic.go:334] "Generic (PLEG): container finished" podID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerID="d92b8f3891e2810eea5ec9bba43ca2571fec7d80219635e9e753d63a03b94a13" exitCode=137
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.198791 4689 generic.go:334] "Generic (PLEG): container finished" podID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerID="89057d727fd0c0d650ec83c4141afb37e1a87c1cf9a929f2f7a5fd9706691ff1" exitCode=137
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.198675 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c5756884f-lw5tc" event={"ID":"5c0aa883-db89-41fe-b956-cbb3994efcc7","Type":"ContainerDied","Data":"d92b8f3891e2810eea5ec9bba43ca2571fec7d80219635e9e753d63a03b94a13"}
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.198875 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c5756884f-lw5tc" event={"ID":"5c0aa883-db89-41fe-b956-cbb3994efcc7","Type":"ContainerDied","Data":"89057d727fd0c0d650ec83c4141afb37e1a87c1cf9a929f2f7a5fd9706691ff1"}
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.201075 4689 generic.go:334] "Generic (PLEG): container finished" podID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerID="178496e8e51f9fc6b23684c0babc353897234e15ac7624aaaaf2a3bba1211455" exitCode=137
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.201092 4689 generic.go:334] "Generic (PLEG): container finished" podID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerID="81c20e55b5748fa6821d7803f8849af873c2ad22493e84bee684a4cc924c09dd" exitCode=137
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.201125 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5fb75fdf-9bkv8" event={"ID":"5f582acd-f393-4d97-a002-186f0ccb1c86","Type":"ContainerDied","Data":"178496e8e51f9fc6b23684c0babc353897234e15ac7624aaaaf2a3bba1211455"}
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.201141 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5fb75fdf-9bkv8" event={"ID":"5f582acd-f393-4d97-a002-186f0ccb1c86","Type":"ContainerDied","Data":"81c20e55b5748fa6821d7803f8849af873c2ad22493e84bee684a4cc924c09dd"}
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.202870 4689 generic.go:334] "Generic (PLEG): container finished" podID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerID="7aafd053d88eb4bb981e2a34bd3dd8ef2e2addad3e5db9743260fdc7a92cc8aa" exitCode=137
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.202887 4689 generic.go:334] "Generic (PLEG): container finished" podID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerID="e226370ef3d71605744db19b503788f9615bc782e87834c65b662671f1cc5f23" exitCode=137
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.202900 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-776df869fc-xvpk7" event={"ID":"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f","Type":"ContainerDied","Data":"7aafd053d88eb4bb981e2a34bd3dd8ef2e2addad3e5db9743260fdc7a92cc8aa"}
Oct 13 21:27:40 crc kubenswrapper[4689]: I1013 21:27:40.202914 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-776df869fc-xvpk7" event={"ID":"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f","Type":"ContainerDied","Data":"e226370ef3d71605744db19b503788f9615bc782e87834c65b662671f1cc5f23"}
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.192257 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-d68g8"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.239567 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-config\") pod \"632487e3-a6f0-4e53-bb4e-33454874ddca\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.240134 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2s4q\" (UniqueName: \"kubernetes.io/projected/632487e3-a6f0-4e53-bb4e-33454874ddca-kube-api-access-s2s4q\") pod \"632487e3-a6f0-4e53-bb4e-33454874ddca\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.240198 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-combined-ca-bundle\") pod \"632487e3-a6f0-4e53-bb4e-33454874ddca\" (UID: \"632487e3-a6f0-4e53-bb4e-33454874ddca\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.245914 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/632487e3-a6f0-4e53-bb4e-33454874ddca-kube-api-access-s2s4q" (OuterVolumeSpecName: "kube-api-access-s2s4q") pod "632487e3-a6f0-4e53-bb4e-33454874ddca" (UID: "632487e3-a6f0-4e53-bb4e-33454874ddca"). InnerVolumeSpecName "kube-api-access-s2s4q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.258341 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-d68g8" event={"ID":"632487e3-a6f0-4e53-bb4e-33454874ddca","Type":"ContainerDied","Data":"26121bf21bf021cd1a0039e0269ac2cff885e760cc3d1a7cf1b1eca1b6335215"}
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.258756 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26121bf21bf021cd1a0039e0269ac2cff885e760cc3d1a7cf1b1eca1b6335215"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.258862 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-d68g8"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.268354 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "632487e3-a6f0-4e53-bb4e-33454874ddca" (UID: "632487e3-a6f0-4e53-bb4e-33454874ddca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.273484 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-config" (OuterVolumeSpecName: "config") pod "632487e3-a6f0-4e53-bb4e-33454874ddca" (UID: "632487e3-a6f0-4e53-bb4e-33454874ddca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.349804 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-config\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.349846 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2s4q\" (UniqueName: \"kubernetes.io/projected/632487e3-a6f0-4e53-bb4e-33454874ddca-kube-api-access-s2s4q\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.349866 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632487e3-a6f0-4e53-bb4e-33454874ddca-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.732144 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-795fd646bb-2s89l"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.740115 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-56b8966ffb-99krc"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.869055 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-776df869fc-xvpk7"
Oct 13 21:27:43 crc kubenswrapper[4689]: E1013 21:27:43.870762 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest"
Oct 13 21:27:43 crc kubenswrapper[4689]: E1013 21:27:43.870952 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kfx2w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(a37ccb01-d27b-43e7-bf7d-902a962053f4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 13 21:27:43 crc kubenswrapper[4689]: E1013 21:27:43.874663 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.897111 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5fb75fdf-9bkv8"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.907445 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c5756884f-lw5tc"
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.958689 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vstqc\" (UniqueName: \"kubernetes.io/projected/5c0aa883-db89-41fe-b956-cbb3994efcc7-kube-api-access-vstqc\") pod \"5c0aa883-db89-41fe-b956-cbb3994efcc7\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959072 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm24n\" (UniqueName: \"kubernetes.io/projected/5f582acd-f393-4d97-a002-186f0ccb1c86-kube-api-access-lm24n\") pod \"5f582acd-f393-4d97-a002-186f0ccb1c86\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959120 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c0aa883-db89-41fe-b956-cbb3994efcc7-logs\") pod \"5c0aa883-db89-41fe-b956-cbb3994efcc7\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959155 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-scripts\") pod \"5f582acd-f393-4d97-a002-186f0ccb1c86\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959175 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcdqw\" (UniqueName: \"kubernetes.io/projected/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-kube-api-access-lcdqw\") pod \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959235 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-scripts\") pod \"5c0aa883-db89-41fe-b956-cbb3994efcc7\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959260 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f582acd-f393-4d97-a002-186f0ccb1c86-logs\") pod \"5f582acd-f393-4d97-a002-186f0ccb1c86\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959371 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f582acd-f393-4d97-a002-186f0ccb1c86-horizon-secret-key\") pod \"5f582acd-f393-4d97-a002-186f0ccb1c86\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959414 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c0aa883-db89-41fe-b956-cbb3994efcc7-horizon-secret-key\") pod \"5c0aa883-db89-41fe-b956-cbb3994efcc7\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959431 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-config-data\") pod \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959447 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-scripts\") pod \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959486 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-config-data\") pod \"5c0aa883-db89-41fe-b956-cbb3994efcc7\" (UID: \"5c0aa883-db89-41fe-b956-cbb3994efcc7\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959509 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-config-data\") pod \"5f582acd-f393-4d97-a002-186f0ccb1c86\" (UID: \"5f582acd-f393-4d97-a002-186f0ccb1c86\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959523 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-logs\") pod \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.959555 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-horizon-secret-key\") pod \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\" (UID: \"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f\") "
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.968474 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" (UID: "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.977608 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-logs" (OuterVolumeSpecName: "logs") pod "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" (UID: "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.978576 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f582acd-f393-4d97-a002-186f0ccb1c86-logs" (OuterVolumeSpecName: "logs") pod "5f582acd-f393-4d97-a002-186f0ccb1c86" (UID: "5f582acd-f393-4d97-a002-186f0ccb1c86"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.995797 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c0aa883-db89-41fe-b956-cbb3994efcc7-logs" (OuterVolumeSpecName: "logs") pod "5c0aa883-db89-41fe-b956-cbb3994efcc7" (UID: "5c0aa883-db89-41fe-b956-cbb3994efcc7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:27:43 crc kubenswrapper[4689]: I1013 21:27:43.998429 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c0aa883-db89-41fe-b956-cbb3994efcc7-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5c0aa883-db89-41fe-b956-cbb3994efcc7" (UID: "5c0aa883-db89-41fe-b956-cbb3994efcc7"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.004377 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f582acd-f393-4d97-a002-186f0ccb1c86-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5f582acd-f393-4d97-a002-186f0ccb1c86" (UID: "5f582acd-f393-4d97-a002-186f0ccb1c86"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.015503 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-kube-api-access-lcdqw" (OuterVolumeSpecName: "kube-api-access-lcdqw") pod "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" (UID: "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f"). InnerVolumeSpecName "kube-api-access-lcdqw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.023794 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f582acd-f393-4d97-a002-186f0ccb1c86-kube-api-access-lm24n" (OuterVolumeSpecName: "kube-api-access-lm24n") pod "5f582acd-f393-4d97-a002-186f0ccb1c86" (UID: "5f582acd-f393-4d97-a002-186f0ccb1c86"). InnerVolumeSpecName "kube-api-access-lm24n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.029449 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c0aa883-db89-41fe-b956-cbb3994efcc7-kube-api-access-vstqc" (OuterVolumeSpecName: "kube-api-access-vstqc") pod "5c0aa883-db89-41fe-b956-cbb3994efcc7" (UID: "5c0aa883-db89-41fe-b956-cbb3994efcc7"). InnerVolumeSpecName "kube-api-access-vstqc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.046505 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-scripts" (OuterVolumeSpecName: "scripts") pod "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" (UID: "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.060080 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-scripts" (OuterVolumeSpecName: "scripts") pod "5c0aa883-db89-41fe-b956-cbb3994efcc7" (UID: "5c0aa883-db89-41fe-b956-cbb3994efcc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061397 4689 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5c0aa883-db89-41fe-b956-cbb3994efcc7-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061424 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061434 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-logs\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061442 4689 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061455 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm24n\" (UniqueName: \"kubernetes.io/projected/5f582acd-f393-4d97-a002-186f0ccb1c86-kube-api-access-lm24n\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061469 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vstqc\" (UniqueName: \"kubernetes.io/projected/5c0aa883-db89-41fe-b956-cbb3994efcc7-kube-api-access-vstqc\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061479 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c0aa883-db89-41fe-b956-cbb3994efcc7-logs\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061487 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcdqw\" (UniqueName: \"kubernetes.io/projected/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-kube-api-access-lcdqw\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061496 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061504 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f582acd-f393-4d97-a002-186f0ccb1c86-logs\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.061511 4689 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f582acd-f393-4d97-a002-186f0ccb1c86-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.079350 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-config-data" (OuterVolumeSpecName: "config-data") pod "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" (UID: "b374e925-6d17-4ef2-8e3b-561b0a8b6a8f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.088257 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-config-data" (OuterVolumeSpecName: "config-data") pod "5c0aa883-db89-41fe-b956-cbb3994efcc7" (UID: "5c0aa883-db89-41fe-b956-cbb3994efcc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.118306 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-config-data" (OuterVolumeSpecName: "config-data") pod "5f582acd-f393-4d97-a002-186f0ccb1c86" (UID: "5f582acd-f393-4d97-a002-186f0ccb1c86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.126211 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-scripts" (OuterVolumeSpecName: "scripts") pod "5f582acd-f393-4d97-a002-186f0ccb1c86" (UID: "5f582acd-f393-4d97-a002-186f0ccb1c86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.167458 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.167563 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5c0aa883-db89-41fe-b956-cbb3994efcc7-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.167642 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.167696 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f582acd-f393-4d97-a002-186f0ccb1c86-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.273381 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-776df869fc-xvpk7" event={"ID":"b374e925-6d17-4ef2-8e3b-561b0a8b6a8f","Type":"ContainerDied","Data":"be623f115eb710961baec56bcc93397a8897304e2d431339f66e217e627be345"}
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.273428 4689 scope.go:117] "RemoveContainer" containerID="7aafd053d88eb4bb981e2a34bd3dd8ef2e2addad3e5db9743260fdc7a92cc8aa"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.273548 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-776df869fc-xvpk7"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.286686 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c5756884f-lw5tc" event={"ID":"5c0aa883-db89-41fe-b956-cbb3994efcc7","Type":"ContainerDied","Data":"582a544cb31d5584c9045318e61cebbb31bf96375b21226ac87210521f1392a4"}
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.286776 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c5756884f-lw5tc"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.311003 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-central-agent" containerID="cri-o://8cbbfa6c2cf50aa25c9179fc5873db4c670f6fa3395f19d3e44f620a081a4d23" gracePeriod=30
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.311053 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5fb75fdf-9bkv8"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.311069 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5fb75fdf-9bkv8" event={"ID":"5f582acd-f393-4d97-a002-186f0ccb1c86","Type":"ContainerDied","Data":"b66f0d0670747c6defbd0fdf098b8770216733b057cfd3d25c963596a949098a"}
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.311098 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="sg-core" containerID="cri-o://4444c7113b201137a9d7543efe4c710bd829594e77e68b6b7c121d0865e0823a" gracePeriod=30
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.311142 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-notification-agent" containerID="cri-o://0dd8099122cbe47ada37da4eaf5c5ccdf760a7dc9073d5934f7e647ab2009cbd" gracePeriod=30
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.340195 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-776df869fc-xvpk7"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.419628 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-776df869fc-xvpk7"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.516405 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4fqs2"]
Oct 13 21:27:44 crc kubenswrapper[4689]: E1013 21:27:44.517673 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.517703 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: E1013 21:27:44.517723 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.517749 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: E1013 21:27:44.517761 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.517770 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: E1013 21:27:44.517797 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.517804 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: E1013 21:27:44.517823 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.517831 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: E1013 21:27:44.517842 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632487e3-a6f0-4e53-bb4e-33454874ddca" containerName="neutron-db-sync"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.517850 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="632487e3-a6f0-4e53-bb4e-33454874ddca" containerName="neutron-db-sync"
Oct 13 21:27:44 crc kubenswrapper[4689]: E1013 21:27:44.517867 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.517874 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.518093 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.518112 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.518127 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon-log"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.518144 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.518157 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="632487e3-a6f0-4e53-bb4e-33454874ddca" containerName="neutron-db-sync"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.518171 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.518187 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" containerName="horizon"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.524155 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.552660 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d89fff484-q9fvk"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.560661 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4fqs2"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.572546 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c5756884f-lw5tc"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.575560 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8xdq\" (UniqueName: \"kubernetes.io/projected/836053c1-e5a4-4e49-a688-f4533fb5d605-kube-api-access-d8xdq\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.575622 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-svc\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.575643 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.575685 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.575704 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-config\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.575822 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.580454 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-c5756884f-lw5tc"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.598264 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d5fb75fdf-9bkv8"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.610607 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-d5fb75fdf-9bkv8"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.623074 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-86dc7995bd-76xtf"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.624807 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.629174 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.629230 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.634004 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-86dc7995bd-76xtf"]
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.634156 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.635058 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5jvkq"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.665799 4689 scope.go:117] "RemoveContainer" containerID="e226370ef3d71605744db19b503788f9615bc782e87834c65b662671f1cc5f23"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677426 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8xdq\" (UniqueName: \"kubernetes.io/projected/836053c1-e5a4-4e49-a688-f4533fb5d605-kube-api-access-d8xdq\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677470 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-svc\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677491 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677517 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-config\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677550 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677567 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57t9l\" (UniqueName: \"kubernetes.io/projected/140a26a7-0308-4d50-b2ec-d2e55be6b812-kube-api-access-57t9l\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677602 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-config\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677620 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-httpd-config\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677679 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-combined-ca-bundle\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677716 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-ovndb-tls-certs\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.677750 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.678542 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.679351 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-svc\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.679742 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.679919 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-config\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.679950 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.722702 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8xdq\" (UniqueName: \"kubernetes.io/projected/836053c1-e5a4-4e49-a688-f4533fb5d605-kube-api-access-d8xdq\") pod \"dnsmasq-dns-55f844cf75-4fqs2\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.780317 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-ovndb-tls-certs\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.780407 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-config\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.780507 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57t9l\" (UniqueName: \"kubernetes.io/projected/140a26a7-0308-4d50-b2ec-d2e55be6b812-kube-api-access-57t9l\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.780532 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-httpd-config\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.780612 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-combined-ca-bundle\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.800906 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-config\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.806784 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-httpd-config\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.806994 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-combined-ca-bundle\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.810381 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-ovndb-tls-certs\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.815095 4689 scope.go:117] "RemoveContainer" containerID="d92b8f3891e2810eea5ec9bba43ca2571fec7d80219635e9e753d63a03b94a13"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.841895 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57t9l\" (UniqueName: \"kubernetes.io/projected/140a26a7-0308-4d50-b2ec-d2e55be6b812-kube-api-access-57t9l\") pod \"neutron-86dc7995bd-76xtf\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.879473 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2"
Oct 13 21:27:44 crc kubenswrapper[4689]: I1013 21:27:44.957257 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.095490 4689 scope.go:117] "RemoveContainer" containerID="89057d727fd0c0d650ec83c4141afb37e1a87c1cf9a929f2f7a5fd9706691ff1"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.372413 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8lmgj" event={"ID":"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579","Type":"ContainerStarted","Data":"7e8165bd07c21504d4ad95368f4e7c5d1ed2d2462e6bae8587b19d39ec43ce7d"}
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.400839 4689 scope.go:117] "RemoveContainer" containerID="178496e8e51f9fc6b23684c0babc353897234e15ac7624aaaaf2a3bba1211455"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.401792 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-8lmgj" podStartSLOduration=7.877983835 podStartE2EDuration="28.401774412s" podCreationTimestamp="2025-10-13 21:27:17 +0000 UTC" firstStartedPulling="2025-10-13 21:27:22.587685975 +0000 UTC m=+959.505931060" lastFinishedPulling="2025-10-13 21:27:43.111476552 +0000 UTC m=+980.029721637" observedRunningTime="2025-10-13 21:27:45.40126128 +0000 UTC m=+982.319506385" watchObservedRunningTime="2025-10-13 21:27:45.401774412 +0000 UTC m=+982.320019497"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.409810 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d89fff484-q9fvk" event={"ID":"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5","Type":"ContainerStarted","Data":"fb07406342d91c33e14eec1b6b222944fa14d521d6b067273be229514fb83d08"}
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.423691 4689 generic.go:334] "Generic (PLEG): container finished" podID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerID="4444c7113b201137a9d7543efe4c710bd829594e77e68b6b7c121d0865e0823a" exitCode=2
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.423716 4689 generic.go:334] "Generic (PLEG): container finished" podID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerID="8cbbfa6c2cf50aa25c9179fc5873db4c670f6fa3395f19d3e44f620a081a4d23" exitCode=0
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.423757 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerDied","Data":"4444c7113b201137a9d7543efe4c710bd829594e77e68b6b7c121d0865e0823a"}
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.423779 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerDied","Data":"8cbbfa6c2cf50aa25c9179fc5873db4c670f6fa3395f19d3e44f620a081a4d23"}
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.745180 4689 scope.go:117] "RemoveContainer" containerID="81c20e55b5748fa6821d7803f8849af873c2ad22493e84bee684a4cc924c09dd"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.764822 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4fqs2"]
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.882643 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c0aa883-db89-41fe-b956-cbb3994efcc7" path="/var/lib/kubelet/pods/5c0aa883-db89-41fe-b956-cbb3994efcc7/volumes"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.888931 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f582acd-f393-4d97-a002-186f0ccb1c86" path="/var/lib/kubelet/pods/5f582acd-f393-4d97-a002-186f0ccb1c86/volumes"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.890355 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b374e925-6d17-4ef2-8e3b-561b0a8b6a8f" path="/var/lib/kubelet/pods/b374e925-6d17-4ef2-8e3b-561b0a8b6a8f/volumes"
Oct 13 21:27:45 crc kubenswrapper[4689]: I1013 21:27:45.921678 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-86dc7995bd-76xtf"]
Oct 13 21:27:45 crc kubenswrapper[4689]: W1013 21:27:45.953936 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod140a26a7_0308_4d50_b2ec_d2e55be6b812.slice/crio-2ed6598d54ce854ba84448b5dc59a5d887382e2342ad4f310a6b4b9ee537b7fb WatchSource:0}: Error finding container 2ed6598d54ce854ba84448b5dc59a5d887382e2342ad4f310a6b4b9ee537b7fb: Status 404 returned error can't find the container with id 2ed6598d54ce854ba84448b5dc59a5d887382e2342ad4f310a6b4b9ee537b7fb
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.329464 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-795fd646bb-2s89l"
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.438441 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" event={"ID":"836053c1-e5a4-4e49-a688-f4533fb5d605","Type":"ContainerStarted","Data":"dde12c4dc2e1463d0afc2d940ca47ee61e0d29e481a3e5267a7ff51e6cc9e67a"}
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.439908 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86dc7995bd-76xtf" event={"ID":"140a26a7-0308-4d50-b2ec-d2e55be6b812","Type":"ContainerStarted","Data":"2ed6598d54ce854ba84448b5dc59a5d887382e2342ad4f310a6b4b9ee537b7fb"}
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.472834 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-56b8966ffb-99krc"
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.551635 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-795fd646bb-2s89l"]
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.554254 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-795fd646bb-2s89l" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon-log" containerID="cri-o://0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e" gracePeriod=30
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.554428 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-795fd646bb-2s89l" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" containerID="cri-o://5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83" gracePeriod=30
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.875318 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-9b98684c9-9h5ml"]
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.915694 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9b98684c9-9h5ml"]
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.915863 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9b98684c9-9h5ml"
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.919111 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Oct 13 21:27:46 crc kubenswrapper[4689]: I1013 21:27:46.920746 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.033821 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-httpd-config\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml"
Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.033887 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-public-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml"
Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.033950 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-config\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml"
Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.034023 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-combined-ca-bundle\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml"
Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.034046 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz4gt\" (UniqueName: \"kubernetes.io/projected/4d52e532-8731-4838-9e3b-e316a722a0a6-kube-api-access-dz4gt\") pod \"neutron-9b98684c9-9h5ml\" (UID:
\"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.034219 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-internal-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.034299 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-ovndb-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.135878 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-httpd-config\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.135932 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-public-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.135971 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-config\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.136012 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-combined-ca-bundle\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.136039 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz4gt\" (UniqueName: \"kubernetes.io/projected/4d52e532-8731-4838-9e3b-e316a722a0a6-kube-api-access-dz4gt\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.136084 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-internal-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.136104 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-ovndb-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc 
kubenswrapper[4689]: I1013 21:27:47.167978 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-public-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.168351 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-internal-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.168429 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-combined-ca-bundle\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.169223 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-ovndb-tls-certs\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.169323 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-httpd-config\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.169355 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4d52e532-8731-4838-9e3b-e316a722a0a6-config\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.176536 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz4gt\" (UniqueName: \"kubernetes.io/projected/4d52e532-8731-4838-9e3b-e316a722a0a6-kube-api-access-dz4gt\") pod \"neutron-9b98684c9-9h5ml\" (UID: \"4d52e532-8731-4838-9e3b-e316a722a0a6\") " pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.249571 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.452009 4689 generic.go:334] "Generic (PLEG): container finished" podID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerID="0dd8099122cbe47ada37da4eaf5c5ccdf760a7dc9073d5934f7e647ab2009cbd" exitCode=0 Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.452049 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerDied","Data":"0dd8099122cbe47ada37da4eaf5c5ccdf760a7dc9073d5934f7e647ab2009cbd"} Oct 13 21:27:47 crc kubenswrapper[4689]: I1013 21:27:47.802952 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9b98684c9-9h5ml"] Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.271373 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.364700 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-log-httpd\") pod \"a37ccb01-d27b-43e7-bf7d-902a962053f4\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.364803 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfx2w\" (UniqueName: \"kubernetes.io/projected/a37ccb01-d27b-43e7-bf7d-902a962053f4-kube-api-access-kfx2w\") pod \"a37ccb01-d27b-43e7-bf7d-902a962053f4\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.364982 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-run-httpd\") pod \"a37ccb01-d27b-43e7-bf7d-902a962053f4\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.365111 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-combined-ca-bundle\") pod \"a37ccb01-d27b-43e7-bf7d-902a962053f4\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.365190 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-scripts\") pod \"a37ccb01-d27b-43e7-bf7d-902a962053f4\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.364994 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a37ccb01-d27b-43e7-bf7d-902a962053f4" (UID: "a37ccb01-d27b-43e7-bf7d-902a962053f4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.365197 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a37ccb01-d27b-43e7-bf7d-902a962053f4" (UID: "a37ccb01-d27b-43e7-bf7d-902a962053f4"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.365250 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-config-data\") pod \"a37ccb01-d27b-43e7-bf7d-902a962053f4\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.365337 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-sg-core-conf-yaml\") pod \"a37ccb01-d27b-43e7-bf7d-902a962053f4\" (UID: \"a37ccb01-d27b-43e7-bf7d-902a962053f4\") " Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.366016 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.366044 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a37ccb01-d27b-43e7-bf7d-902a962053f4-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.369215 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a37ccb01-d27b-43e7-bf7d-902a962053f4-kube-api-access-kfx2w" (OuterVolumeSpecName: "kube-api-access-kfx2w") pod "a37ccb01-d27b-43e7-bf7d-902a962053f4" (UID: "a37ccb01-d27b-43e7-bf7d-902a962053f4"). InnerVolumeSpecName "kube-api-access-kfx2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.372506 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-scripts" (OuterVolumeSpecName: "scripts") pod "a37ccb01-d27b-43e7-bf7d-902a962053f4" (UID: "a37ccb01-d27b-43e7-bf7d-902a962053f4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.469953 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.471106 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfx2w\" (UniqueName: \"kubernetes.io/projected/a37ccb01-d27b-43e7-bf7d-902a962053f4-kube-api-access-kfx2w\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.494437 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9b98684c9-9h5ml" event={"ID":"4d52e532-8731-4838-9e3b-e316a722a0a6","Type":"ContainerStarted","Data":"372bb8c08ac6e9d095d5f0f034b8933cd42509e519483b2fa2eb9e0a35aa3eb8"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.494483 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9b98684c9-9h5ml" event={"ID":"4d52e532-8731-4838-9e3b-e316a722a0a6","Type":"ContainerStarted","Data":"cf48b7c38aa84d28587632f9870a69da0b8a06e8c1fc42bc0fcad0c2cc0e2953"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.505788 4689 generic.go:334] "Generic (PLEG): container finished" podID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerID="566bc8031bb1904d28a8edb7d85204aa303769d45875bb7e4e3e79f919fa0049" exitCode=0 Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.505854 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" event={"ID":"836053c1-e5a4-4e49-a688-f4533fb5d605","Type":"ContainerDied","Data":"566bc8031bb1904d28a8edb7d85204aa303769d45875bb7e4e3e79f919fa0049"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.540665 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a37ccb01-d27b-43e7-bf7d-902a962053f4","Type":"ContainerDied","Data":"02bf9fbb4b3a9ae0b1c38fd211e9d57319e4a6b45701c17d3359dca2922e4c38"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.540998 4689 scope.go:117] "RemoveContainer" containerID="4444c7113b201137a9d7543efe4c710bd829594e77e68b6b7c121d0865e0823a" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.541135 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.579564 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86dc7995bd-76xtf" event={"ID":"140a26a7-0308-4d50-b2ec-d2e55be6b812","Type":"ContainerStarted","Data":"3d9c65c1bce1f25279f529b62b6ec8e12128ae3528a65493d767422116dd67e9"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.579621 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86dc7995bd-76xtf" event={"ID":"140a26a7-0308-4d50-b2ec-d2e55be6b812","Type":"ContainerStarted","Data":"37d73f863fb58767978048393ee9d0ff0b55db0ac178ce34413ea96fce4a44b8"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.580309 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-86dc7995bd-76xtf" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.600382 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d89fff484-q9fvk" event={"ID":"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5","Type":"ContainerStarted","Data":"606a6182e79874c186e1c0c8f47a3fbacbc5afddc2c56012fdf857d8ffa228e9"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.600468 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d89fff484-q9fvk" event={"ID":"3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5","Type":"ContainerStarted","Data":"1453ecd6945adde49ebb5989f19b615f2335bd320ab80a8211a2d91d5beca488"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.601727 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d89fff484-q9fvk" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.601782 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d89fff484-q9fvk" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.614909 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xsdlt" event={"ID":"2c5019e8-86d0-4324-bffc-70583983b377","Type":"ContainerStarted","Data":"f793658662f1d70ea731e85d48cd1c012dab43f36fbdf77b6d23d5bc801a94a7"} Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.649456 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a37ccb01-d27b-43e7-bf7d-902a962053f4" (UID: "a37ccb01-d27b-43e7-bf7d-902a962053f4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.666264 4689 scope.go:117] "RemoveContainer" containerID="0dd8099122cbe47ada37da4eaf5c5ccdf760a7dc9073d5934f7e647ab2009cbd" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.682073 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.684434 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-config-data" (OuterVolumeSpecName: "config-data") pod "a37ccb01-d27b-43e7-bf7d-902a962053f4" (UID: "a37ccb01-d27b-43e7-bf7d-902a962053f4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.691903 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-xsdlt" podStartSLOduration=10.137736514 podStartE2EDuration="31.691888735s" podCreationTimestamp="2025-10-13 21:27:17 +0000 UTC" firstStartedPulling="2025-10-13 21:27:22.321075519 +0000 UTC m=+959.239320604" lastFinishedPulling="2025-10-13 21:27:43.87522774 +0000 UTC m=+980.793472825" observedRunningTime="2025-10-13 21:27:48.686800224 +0000 UTC m=+985.605045309" watchObservedRunningTime="2025-10-13 21:27:48.691888735 +0000 UTC m=+985.610133820" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.700637 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-86dc7995bd-76xtf" podStartSLOduration=4.700620151 podStartE2EDuration="4.700620151s" podCreationTimestamp="2025-10-13 21:27:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:48.646892312 +0000 UTC m=+985.565137397" watchObservedRunningTime="2025-10-13 21:27:48.700620151 +0000 UTC m=+985.618865236" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.708724 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a37ccb01-d27b-43e7-bf7d-902a962053f4" (UID: "a37ccb01-d27b-43e7-bf7d-902a962053f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.725483 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7d89fff484-q9fvk" podStartSLOduration=18.725465878 podStartE2EDuration="18.725465878s" podCreationTimestamp="2025-10-13 21:27:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:48.725004526 +0000 UTC m=+985.643249611" watchObservedRunningTime="2025-10-13 21:27:48.725465878 +0000 UTC m=+985.643710963" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.753747 4689 scope.go:117] "RemoveContainer" containerID="8cbbfa6c2cf50aa25c9179fc5873db4c670f6fa3395f19d3e44f620a081a4d23" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.783900 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.783933 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a37ccb01-d27b-43e7-bf7d-902a962053f4-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.907232 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.935083 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.956359 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6f78847c76-48zjm" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.987864 4689 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ceilometer-0"] Oct 13 21:27:48 crc kubenswrapper[4689]: E1013 21:27:48.988894 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-notification-agent" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.988973 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-notification-agent" Oct 13 21:27:48 crc kubenswrapper[4689]: E1013 21:27:48.989490 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="sg-core" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.989552 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="sg-core" Oct 13 21:27:48 crc kubenswrapper[4689]: E1013 21:27:48.989826 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-central-agent" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.989897 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-central-agent" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.990846 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="sg-core" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.990910 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-notification-agent" Oct 13 21:27:48 crc kubenswrapper[4689]: I1013 21:27:48.990920 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" containerName="ceilometer-central-agent" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.022474 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.029440 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.030877 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.030893 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.095314 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.095469 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-config-data\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.095500 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.095554 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-log-httpd\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.095613 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llmn6\" (UniqueName: \"kubernetes.io/projected/e13bd179-aefc-4620-92fd-d4563d7b7f72-kube-api-access-llmn6\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.095663 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-scripts\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.095745 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-run-httpd\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.197939 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-log-httpd\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198023 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llmn6\" (UniqueName: \"kubernetes.io/projected/e13bd179-aefc-4620-92fd-d4563d7b7f72-kube-api-access-llmn6\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198069 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-scripts\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198167 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-run-httpd\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198213 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198283 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-config-data\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198320 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198424 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-log-httpd\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.198703 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-run-httpd\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.204132 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.208471 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-config-data\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.217982 4689 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-scripts\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.218136 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.222765 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llmn6\" (UniqueName: \"kubernetes.io/projected/e13bd179-aefc-4620-92fd-d4563d7b7f72-kube-api-access-llmn6\") pod \"ceilometer-0\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.364255 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.635220 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9b98684c9-9h5ml" event={"ID":"4d52e532-8731-4838-9e3b-e316a722a0a6","Type":"ContainerStarted","Data":"4846d0e74adf6988b8d6ec983bd5e71694f551fa614834a8da213e92bbb95d58"} Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.635893 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.645510 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" event={"ID":"836053c1-e5a4-4e49-a688-f4533fb5d605","Type":"ContainerStarted","Data":"492de78ead55251f430989d7c2561a4e4b38323e5621db9e317d020cb95a3a9c"} Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.645561 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.685706 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-9b98684c9-9h5ml" podStartSLOduration=3.685689076 podStartE2EDuration="3.685689076s" podCreationTimestamp="2025-10-13 21:27:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:49.678000414 +0000 UTC m=+986.596245499" watchObservedRunningTime="2025-10-13 21:27:49.685689076 +0000 UTC m=+986.603934161" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.702300 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" podStartSLOduration=5.702283187 podStartE2EDuration="5.702283187s" podCreationTimestamp="2025-10-13 21:27:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:49.697432113 +0000 UTC m=+986.615677198" watchObservedRunningTime="2025-10-13 21:27:49.702283187 +0000 UTC m=+986.620528272" Oct 13 21:27:49 crc kubenswrapper[4689]: W1013 21:27:49.879022 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode13bd179_aefc_4620_92fd_d4563d7b7f72.slice/crio-e781d7e391e45be04fe0348717b96a851f890697a3d63575bf6f6006924fff21 WatchSource:0}: Error finding container 
e781d7e391e45be04fe0348717b96a851f890697a3d63575bf6f6006924fff21: Status 404 returned error can't find the container with id e781d7e391e45be04fe0348717b96a851f890697a3d63575bf6f6006924fff21 Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.886852 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a37ccb01-d27b-43e7-bf7d-902a962053f4" path="/var/lib/kubelet/pods/a37ccb01-d27b-43e7-bf7d-902a962053f4/volumes" Oct 13 21:27:49 crc kubenswrapper[4689]: I1013 21:27:49.892495 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.666872 4689 generic.go:334] "Generic (PLEG): container finished" podID="33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" containerID="7e8165bd07c21504d4ad95368f4e7c5d1ed2d2462e6bae8587b19d39ec43ce7d" exitCode=0 Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.666930 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8lmgj" event={"ID":"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579","Type":"ContainerDied","Data":"7e8165bd07c21504d4ad95368f4e7c5d1ed2d2462e6bae8587b19d39ec43ce7d"} Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.669645 4689 generic.go:334] "Generic (PLEG): container finished" podID="fd448066-1b70-4e35-959c-5c702d87560f" containerID="5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83" exitCode=0 Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.669727 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-795fd646bb-2s89l" event={"ID":"fd448066-1b70-4e35-959c-5c702d87560f","Type":"ContainerDied","Data":"5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83"} Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.671086 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerStarted","Data":"6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953"} Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.671132 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerStarted","Data":"e781d7e391e45be04fe0348717b96a851f890697a3d63575bf6f6006924fff21"} Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.974974 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.976294 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.979821 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.980012 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-zgd9x" Oct 13 21:27:50 crc kubenswrapper[4689]: I1013 21:27:50.984112 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.010463 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.143155 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-openstack-config-secret\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.143245 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.143324 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rcqf\" (UniqueName: \"kubernetes.io/projected/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-kube-api-access-6rcqf\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.143372 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-openstack-config\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.245813 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rcqf\" (UniqueName: \"kubernetes.io/projected/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-kube-api-access-6rcqf\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.246360 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-openstack-config\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.246513 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-openstack-config-secret\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.246573 4689 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.247220 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-openstack-config\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.255444 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.264356 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-openstack-config-secret\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.268533 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rcqf\" (UniqueName: \"kubernetes.io/projected/10f0cb83-9cb9-48d1-8b9e-2217c48790d9-kube-api-access-6rcqf\") pod \"openstackclient\" (UID: \"10f0cb83-9cb9-48d1-8b9e-2217c48790d9\") " pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.314370 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.630983 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-795fd646bb-2s89l" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 13 21:27:51 crc kubenswrapper[4689]: I1013 21:27:51.835416 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 13 21:27:51 crc kubenswrapper[4689]: W1013 21:27:51.867355 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10f0cb83_9cb9_48d1_8b9e_2217c48790d9.slice/crio-5b08acff8923eb7c25584f9c6a08add112ea9cabfc08e1acdb0f110f111a2171 WatchSource:0}: Error finding container 5b08acff8923eb7c25584f9c6a08add112ea9cabfc08e1acdb0f110f111a2171: Status 404 returned error can't find the container with id 5b08acff8923eb7c25584f9c6a08add112ea9cabfc08e1acdb0f110f111a2171 Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.213518 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.278977 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-db-sync-config-data\") pod \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.279378 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6bx6\" (UniqueName: \"kubernetes.io/projected/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-kube-api-access-t6bx6\") pod \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.279519 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-combined-ca-bundle\") pod \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\" (UID: \"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579\") " Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.289806 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" (UID: "33ebfe99-ee3b-49f0-9ffe-1bad91c2f579"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.289953 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-kube-api-access-t6bx6" (OuterVolumeSpecName: "kube-api-access-t6bx6") pod "33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" (UID: "33ebfe99-ee3b-49f0-9ffe-1bad91c2f579"). InnerVolumeSpecName "kube-api-access-t6bx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.315820 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" (UID: "33ebfe99-ee3b-49f0-9ffe-1bad91c2f579"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.383417 4689 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.383466 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6bx6\" (UniqueName: \"kubernetes.io/projected/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-kube-api-access-t6bx6\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.383481 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.690246 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"10f0cb83-9cb9-48d1-8b9e-2217c48790d9","Type":"ContainerStarted","Data":"5b08acff8923eb7c25584f9c6a08add112ea9cabfc08e1acdb0f110f111a2171"} Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.692359 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8lmgj" event={"ID":"33ebfe99-ee3b-49f0-9ffe-1bad91c2f579","Type":"ContainerDied","Data":"e603df4d22ae08ce0f30e8e821b2bd71381d745ab78bda7bc81f0f66d9f9e6a6"} Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.692460 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e603df4d22ae08ce0f30e8e821b2bd71381d745ab78bda7bc81f0f66d9f9e6a6" Oct 13 21:27:52 crc kubenswrapper[4689]: I1013 21:27:52.692469 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8lmgj" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.033951 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5f6bb4646c-c8vt4"] Oct 13 21:27:53 crc kubenswrapper[4689]: E1013 21:27:53.034992 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" containerName="barbican-db-sync" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.035011 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" containerName="barbican-db-sync" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.035249 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" containerName="barbican-db-sync" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.036505 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.046313 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7gbvh" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.046661 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.048988 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.057801 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7bb46f45d-mgdw4"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.060108 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.064872 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.072085 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5f6bb4646c-c8vt4"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.082019 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7bb46f45d-mgdw4"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119516 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg5vp\" (UniqueName: \"kubernetes.io/projected/94bcd39d-bca7-4d51-9327-aec08e22b60a-kube-api-access-wg5vp\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119616 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-combined-ca-bundle\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119692 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-config-data\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119719 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-combined-ca-bundle\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119750 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-config-data-custom\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: 
\"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119776 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-logs\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119798 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgrfp\" (UniqueName: \"kubernetes.io/projected/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-kube-api-access-kgrfp\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119820 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-config-data-custom\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119848 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-config-data\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.119875 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bcd39d-bca7-4d51-9327-aec08e22b60a-logs\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.166868 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4fqs2"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.167377 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" podUID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerName="dnsmasq-dns" containerID="cri-o://492de78ead55251f430989d7c2561a4e4b38323e5621db9e317d020cb95a3a9c" gracePeriod=10 Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.173800 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.229139 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-bzrvq"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.231449 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-bzrvq"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.231578 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.237797 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg5vp\" (UniqueName: \"kubernetes.io/projected/94bcd39d-bca7-4d51-9327-aec08e22b60a-kube-api-access-wg5vp\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.240652 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-combined-ca-bundle\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.240903 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-config-data\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.240985 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-combined-ca-bundle\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.241101 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-config-data-custom\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.241198 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-logs\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.241255 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgrfp\" (UniqueName: \"kubernetes.io/projected/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-kube-api-access-kgrfp\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.241285 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-config-data-custom\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.241360 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-config-data\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.241439 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bcd39d-bca7-4d51-9327-aec08e22b60a-logs\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.241973 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94bcd39d-bca7-4d51-9327-aec08e22b60a-logs\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.250984 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-combined-ca-bundle\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.254871 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-config-data-custom\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.255217 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-logs\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.257059 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-combined-ca-bundle\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.258235 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-config-data\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.260015 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-config-data-custom\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.282443 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgrfp\" 
(UniqueName: \"kubernetes.io/projected/be6e9b94-6d9a-46ae-ae15-5d9516e4ee47-kube-api-access-kgrfp\") pod \"barbican-worker-5f6bb4646c-c8vt4\" (UID: \"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47\") " pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.297630 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bcd39d-bca7-4d51-9327-aec08e22b60a-config-data\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.313229 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg5vp\" (UniqueName: \"kubernetes.io/projected/94bcd39d-bca7-4d51-9327-aec08e22b60a-kube-api-access-wg5vp\") pod \"barbican-keystone-listener-7bb46f45d-mgdw4\" (UID: \"94bcd39d-bca7-4d51-9327-aec08e22b60a\") " pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.314573 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-555854dc66-rf9mg"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.318361 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.332536 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342481 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342537 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-svc\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342561 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342600 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-config\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342631 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " 
pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342672 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rd77\" (UniqueName: \"kubernetes.io/projected/82c5049d-0d61-4749-a025-85d112c1c9a4-kube-api-access-5rd77\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342691 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hljr9\" (UniqueName: \"kubernetes.io/projected/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-kube-api-access-hljr9\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342712 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82c5049d-0d61-4749-a025-85d112c1c9a4-logs\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342735 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data-custom\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342760 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.342819 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-combined-ca-bundle\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.344055 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-555854dc66-rf9mg"] Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.395159 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5f6bb4646c-c8vt4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.444623 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.451208 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.451416 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-svc\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.451532 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.451801 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-config\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.451952 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.452145 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rd77\" (UniqueName: \"kubernetes.io/projected/82c5049d-0d61-4749-a025-85d112c1c9a4-kube-api-access-5rd77\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.452244 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hljr9\" (UniqueName: \"kubernetes.io/projected/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-kube-api-access-hljr9\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.452336 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82c5049d-0d61-4749-a025-85d112c1c9a4-logs\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.452452 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data-custom\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " 
pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.452560 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.452792 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-combined-ca-bundle\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.456793 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-svc\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.457142 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82c5049d-0d61-4749-a025-85d112c1c9a4-logs\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.457190 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.457777 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.457874 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-config\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.463684 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.464524 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data-custom\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.465534 4689 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.467866 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-combined-ca-bundle\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.478197 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hljr9\" (UniqueName: \"kubernetes.io/projected/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-kube-api-access-hljr9\") pod \"dnsmasq-dns-85ff748b95-bzrvq\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.498250 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rd77\" (UniqueName: \"kubernetes.io/projected/82c5049d-0d61-4749-a025-85d112c1c9a4-kube-api-access-5rd77\") pod \"barbican-api-555854dc66-rf9mg\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.617138 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.628128 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.752173 4689 generic.go:334] "Generic (PLEG): container finished" podID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerID="492de78ead55251f430989d7c2561a4e4b38323e5621db9e317d020cb95a3a9c" exitCode=0 Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.752265 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" event={"ID":"836053c1-e5a4-4e49-a688-f4533fb5d605","Type":"ContainerDied","Data":"492de78ead55251f430989d7c2561a4e4b38323e5621db9e317d020cb95a3a9c"} Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.779719 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerStarted","Data":"115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6"} Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.859156 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:27:53 crc kubenswrapper[4689]: I1013 21:27:53.859293 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.089345 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.163102 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5f6bb4646c-c8vt4"] Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.183194 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8xdq\" (UniqueName: \"kubernetes.io/projected/836053c1-e5a4-4e49-a688-f4533fb5d605-kube-api-access-d8xdq\") pod \"836053c1-e5a4-4e49-a688-f4533fb5d605\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.183311 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-nb\") pod \"836053c1-e5a4-4e49-a688-f4533fb5d605\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.183338 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-config\") pod \"836053c1-e5a4-4e49-a688-f4533fb5d605\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.183436 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-sb\") pod \"836053c1-e5a4-4e49-a688-f4533fb5d605\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.183491 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-svc\") pod \"836053c1-e5a4-4e49-a688-f4533fb5d605\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.183672 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-swift-storage-0\") pod \"836053c1-e5a4-4e49-a688-f4533fb5d605\" (UID: \"836053c1-e5a4-4e49-a688-f4533fb5d605\") " Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.233045 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/836053c1-e5a4-4e49-a688-f4533fb5d605-kube-api-access-d8xdq" (OuterVolumeSpecName: "kube-api-access-d8xdq") pod "836053c1-e5a4-4e49-a688-f4533fb5d605" (UID: "836053c1-e5a4-4e49-a688-f4533fb5d605"). InnerVolumeSpecName "kube-api-access-d8xdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.280694 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "836053c1-e5a4-4e49-a688-f4533fb5d605" (UID: "836053c1-e5a4-4e49-a688-f4533fb5d605"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.286072 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.286101 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8xdq\" (UniqueName: \"kubernetes.io/projected/836053c1-e5a4-4e49-a688-f4533fb5d605-kube-api-access-d8xdq\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.364836 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7bb46f45d-mgdw4"] Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.408761 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "836053c1-e5a4-4e49-a688-f4533fb5d605" (UID: "836053c1-e5a4-4e49-a688-f4533fb5d605"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.428410 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "836053c1-e5a4-4e49-a688-f4533fb5d605" (UID: "836053c1-e5a4-4e49-a688-f4533fb5d605"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.467864 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-config" (OuterVolumeSpecName: "config") pod "836053c1-e5a4-4e49-a688-f4533fb5d605" (UID: "836053c1-e5a4-4e49-a688-f4533fb5d605"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.474115 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "836053c1-e5a4-4e49-a688-f4533fb5d605" (UID: "836053c1-e5a4-4e49-a688-f4533fb5d605"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.494320 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.494354 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.494363 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.494372 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/836053c1-e5a4-4e49-a688-f4533fb5d605-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.519184 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-bzrvq"] Oct 13 21:27:54 crc kubenswrapper[4689]: W1013 21:27:54.528748 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82c5049d_0d61_4749_a025_85d112c1c9a4.slice/crio-3f1f053189d9cd93d619291923a717a5816ac12dab93b2cb3a0ba7d94f81b3e5 WatchSource:0}: Error finding container 3f1f053189d9cd93d619291923a717a5816ac12dab93b2cb3a0ba7d94f81b3e5: Status 404 returned error can't find the container with id 3f1f053189d9cd93d619291923a717a5816ac12dab93b2cb3a0ba7d94f81b3e5 Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.536000 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-555854dc66-rf9mg"] Oct 13 21:27:54 crc kubenswrapper[4689]: W1013 21:27:54.536671 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab8bc24e_29b3_4f3f_a0cc_12efaaeb9fb6.slice/crio-ea30ca8c70573855f18d5f400fe77e7e107fb714b887a8c4fa00e322e300513f WatchSource:0}: Error finding container ea30ca8c70573855f18d5f400fe77e7e107fb714b887a8c4fa00e322e300513f: Status 404 returned error can't find the container with id ea30ca8c70573855f18d5f400fe77e7e107fb714b887a8c4fa00e322e300513f Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.792177 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerStarted","Data":"b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a"} Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.795185 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f6bb4646c-c8vt4" event={"ID":"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47","Type":"ContainerStarted","Data":"793077944dcb199479698076926b2ea149d8748e996394f84f9fb880f3729927"} Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.801313 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" event={"ID":"836053c1-e5a4-4e49-a688-f4533fb5d605","Type":"ContainerDied","Data":"dde12c4dc2e1463d0afc2d940ca47ee61e0d29e481a3e5267a7ff51e6cc9e67a"} Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.801367 4689 scope.go:117] "RemoveContainer" 
containerID="492de78ead55251f430989d7c2561a4e4b38323e5621db9e317d020cb95a3a9c" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.801558 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-4fqs2" Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.822727 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" event={"ID":"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6","Type":"ContainerStarted","Data":"ea30ca8c70573855f18d5f400fe77e7e107fb714b887a8c4fa00e322e300513f"} Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.843329 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" event={"ID":"94bcd39d-bca7-4d51-9327-aec08e22b60a","Type":"ContainerStarted","Data":"85ce156909e56cdb513b8a983c8d8386c6826b95952ac1914fddc80017c13dd1"} Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.843384 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4fqs2"] Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.845632 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-555854dc66-rf9mg" event={"ID":"82c5049d-0d61-4749-a025-85d112c1c9a4","Type":"ContainerStarted","Data":"f52a6e503c9a7db1bafe202035f1e951b8462908eadb1de89f23ddfc24300c78"} Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.845670 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-555854dc66-rf9mg" event={"ID":"82c5049d-0d61-4749-a025-85d112c1c9a4","Type":"ContainerStarted","Data":"3f1f053189d9cd93d619291923a717a5816ac12dab93b2cb3a0ba7d94f81b3e5"} Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.850431 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-4fqs2"] Oct 13 21:27:54 crc kubenswrapper[4689]: I1013 21:27:54.921735 4689 scope.go:117] "RemoveContainer" containerID="566bc8031bb1904d28a8edb7d85204aa303769d45875bb7e4e3e79f919fa0049" Oct 13 21:27:55 crc kubenswrapper[4689]: I1013 21:27:55.860885 4689 generic.go:334] "Generic (PLEG): container finished" podID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerID="2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb" exitCode=0 Oct 13 21:27:55 crc kubenswrapper[4689]: I1013 21:27:55.861520 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" event={"ID":"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6","Type":"ContainerDied","Data":"2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb"} Oct 13 21:27:55 crc kubenswrapper[4689]: I1013 21:27:55.914727 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="836053c1-e5a4-4e49-a688-f4533fb5d605" path="/var/lib/kubelet/pods/836053c1-e5a4-4e49-a688-f4533fb5d605/volumes" Oct 13 21:27:55 crc kubenswrapper[4689]: I1013 21:27:55.915713 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-555854dc66-rf9mg" event={"ID":"82c5049d-0d61-4749-a025-85d112c1c9a4","Type":"ContainerStarted","Data":"139efe3f65346100f078d6bc1bf4c709d269dee77a578778149c4f702314d58d"} Oct 13 21:27:55 crc kubenswrapper[4689]: I1013 21:27:55.940372 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-555854dc66-rf9mg" podStartSLOduration=2.9403524819999998 podStartE2EDuration="2.940352482s" podCreationTimestamp="2025-10-13 21:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:55.934036152 +0000 UTC m=+992.852281247" watchObservedRunningTime="2025-10-13 21:27:55.940352482 +0000 UTC m=+992.858597567" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.426104 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6545bbd774-g576b"] Oct 13 21:27:56 crc kubenswrapper[4689]: E1013 21:27:56.426663 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerName="dnsmasq-dns" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.426683 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerName="dnsmasq-dns" Oct 13 21:27:56 crc kubenswrapper[4689]: E1013 21:27:56.426703 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerName="init" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.426710 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerName="init" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.426951 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="836053c1-e5a4-4e49-a688-f4533fb5d605" containerName="dnsmasq-dns" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.428014 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.431979 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.434295 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.437901 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6545bbd774-g576b"] Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.558657 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-combined-ca-bundle\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.558725 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/839e3e00-f791-4fb8-8df0-677c8e9a0c27-logs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.558814 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-config-data\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.558920 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-config-data-custom\") pod \"barbican-api-6545bbd774-g576b\" 
(UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.558943 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4wgb\" (UniqueName: \"kubernetes.io/projected/839e3e00-f791-4fb8-8df0-677c8e9a0c27-kube-api-access-j4wgb\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.558967 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-internal-tls-certs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.558983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-public-tls-certs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.659948 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-internal-tls-certs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.659996 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-public-tls-certs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.660027 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-combined-ca-bundle\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.660062 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/839e3e00-f791-4fb8-8df0-677c8e9a0c27-logs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.660142 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-config-data\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.660200 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-config-data-custom\") pod 
\"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.660224 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4wgb\" (UniqueName: \"kubernetes.io/projected/839e3e00-f791-4fb8-8df0-677c8e9a0c27-kube-api-access-j4wgb\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.662989 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/839e3e00-f791-4fb8-8df0-677c8e9a0c27-logs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.666067 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-public-tls-certs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.666431 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-internal-tls-certs\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.666494 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-combined-ca-bundle\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.667193 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-config-data-custom\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.668622 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839e3e00-f791-4fb8-8df0-677c8e9a0c27-config-data\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.683021 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4wgb\" (UniqueName: \"kubernetes.io/projected/839e3e00-f791-4fb8-8df0-677c8e9a0c27-kube-api-access-j4wgb\") pod \"barbican-api-6545bbd774-g576b\" (UID: \"839e3e00-f791-4fb8-8df0-677c8e9a0c27\") " pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.751977 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.880108 4689 generic.go:334] "Generic (PLEG): container finished" podID="2c5019e8-86d0-4324-bffc-70583983b377" containerID="f793658662f1d70ea731e85d48cd1c012dab43f36fbdf77b6d23d5bc801a94a7" exitCode=0 Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.880184 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xsdlt" event={"ID":"2c5019e8-86d0-4324-bffc-70583983b377","Type":"ContainerDied","Data":"f793658662f1d70ea731e85d48cd1c012dab43f36fbdf77b6d23d5bc801a94a7"} Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.884265 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:56 crc kubenswrapper[4689]: I1013 21:27:56.884690 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:27:57 crc kubenswrapper[4689]: I1013 21:27:57.930001 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" event={"ID":"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6","Type":"ContainerStarted","Data":"72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560"} Oct 13 21:27:57 crc kubenswrapper[4689]: I1013 21:27:57.930512 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:27:57 crc kubenswrapper[4689]: I1013 21:27:57.965162 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" podStartSLOduration=4.965144572 podStartE2EDuration="4.965144572s" podCreationTimestamp="2025-10-13 21:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:57.952301388 +0000 UTC m=+994.870546473" watchObservedRunningTime="2025-10-13 21:27:57.965144572 +0000 UTC m=+994.883389657" Oct 13 21:27:57 crc kubenswrapper[4689]: I1013 21:27:57.979793 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6545bbd774-g576b"] Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.273700 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.400237 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-scripts\") pod \"2c5019e8-86d0-4324-bffc-70583983b377\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.400290 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch4rt\" (UniqueName: \"kubernetes.io/projected/2c5019e8-86d0-4324-bffc-70583983b377-kube-api-access-ch4rt\") pod \"2c5019e8-86d0-4324-bffc-70583983b377\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.400603 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-combined-ca-bundle\") pod \"2c5019e8-86d0-4324-bffc-70583983b377\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.400667 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-config-data\") pod \"2c5019e8-86d0-4324-bffc-70583983b377\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.400695 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c5019e8-86d0-4324-bffc-70583983b377-etc-machine-id\") pod \"2c5019e8-86d0-4324-bffc-70583983b377\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.400749 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-db-sync-config-data\") pod \"2c5019e8-86d0-4324-bffc-70583983b377\" (UID: \"2c5019e8-86d0-4324-bffc-70583983b377\") " Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.408395 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-scripts" (OuterVolumeSpecName: "scripts") pod "2c5019e8-86d0-4324-bffc-70583983b377" (UID: "2c5019e8-86d0-4324-bffc-70583983b377"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.408620 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2c5019e8-86d0-4324-bffc-70583983b377-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2c5019e8-86d0-4324-bffc-70583983b377" (UID: "2c5019e8-86d0-4324-bffc-70583983b377"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.418353 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c5019e8-86d0-4324-bffc-70583983b377-kube-api-access-ch4rt" (OuterVolumeSpecName: "kube-api-access-ch4rt") pod "2c5019e8-86d0-4324-bffc-70583983b377" (UID: "2c5019e8-86d0-4324-bffc-70583983b377"). InnerVolumeSpecName "kube-api-access-ch4rt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.425376 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2c5019e8-86d0-4324-bffc-70583983b377" (UID: "2c5019e8-86d0-4324-bffc-70583983b377"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.473850 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c5019e8-86d0-4324-bffc-70583983b377" (UID: "2c5019e8-86d0-4324-bffc-70583983b377"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.505774 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.505815 4689 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c5019e8-86d0-4324-bffc-70583983b377-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.505828 4689 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.505838 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.505848 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch4rt\" (UniqueName: \"kubernetes.io/projected/2c5019e8-86d0-4324-bffc-70583983b377-kube-api-access-ch4rt\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.510064 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-config-data" (OuterVolumeSpecName: "config-data") pod "2c5019e8-86d0-4324-bffc-70583983b377" (UID: "2c5019e8-86d0-4324-bffc-70583983b377"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.606963 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c5019e8-86d0-4324-bffc-70583983b377-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.912313 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5f54846cc7-sc4qr"] Oct 13 21:27:58 crc kubenswrapper[4689]: E1013 21:27:58.913298 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c5019e8-86d0-4324-bffc-70583983b377" containerName="cinder-db-sync" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.913409 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c5019e8-86d0-4324-bffc-70583983b377" containerName="cinder-db-sync" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.913784 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c5019e8-86d0-4324-bffc-70583983b377" containerName="cinder-db-sync" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.915037 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.926624 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.927080 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.927233 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.951966 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f6bb4646c-c8vt4" event={"ID":"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47","Type":"ContainerStarted","Data":"aa47b01350291b9be972167f49e7c8d493a2918348cb3df9443dde9ef7534338"} Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.952324 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f6bb4646c-c8vt4" event={"ID":"be6e9b94-6d9a-46ae-ae15-5d9516e4ee47","Type":"ContainerStarted","Data":"dd388fd41439ec3058b02446da42b6f709ff47ff9a5fe3d5cf843923b4f330c3"} Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.957295 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f54846cc7-sc4qr"] Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.961557 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6545bbd774-g576b" event={"ID":"839e3e00-f791-4fb8-8df0-677c8e9a0c27","Type":"ContainerStarted","Data":"8291562f80d74cb1f4f9a4587a654660c5b560b0e46a9585224a21bd89bcb711"} Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.961797 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6545bbd774-g576b" event={"ID":"839e3e00-f791-4fb8-8df0-677c8e9a0c27","Type":"ContainerStarted","Data":"d549ae4f4922c5306ee655ba50c4aaba297d5c6508113aac3d47670ed76188a5"} Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.961898 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6545bbd774-g576b" event={"ID":"839e3e00-f791-4fb8-8df0-677c8e9a0c27","Type":"ContainerStarted","Data":"2da9da013bf5942a154268eb1dd2435e5a81486e552b63b11263a94d101b80d5"} Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 
21:27:58.962868 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.962983 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.982367 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5f6bb4646c-c8vt4" podStartSLOduration=3.52744577 podStartE2EDuration="6.982350084s" podCreationTimestamp="2025-10-13 21:27:52 +0000 UTC" firstStartedPulling="2025-10-13 21:27:54.185931328 +0000 UTC m=+991.104176413" lastFinishedPulling="2025-10-13 21:27:57.640835652 +0000 UTC m=+994.559080727" observedRunningTime="2025-10-13 21:27:58.977959311 +0000 UTC m=+995.896204386" watchObservedRunningTime="2025-10-13 21:27:58.982350084 +0000 UTC m=+995.900595159" Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.996882 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" event={"ID":"94bcd39d-bca7-4d51-9327-aec08e22b60a","Type":"ContainerStarted","Data":"a370dcb610ed9a9f942169a30e05d7519c33d0b6eb2edb65444896856fbeb3e1"} Oct 13 21:27:58 crc kubenswrapper[4689]: I1013 21:27:58.996934 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" event={"ID":"94bcd39d-bca7-4d51-9327-aec08e22b60a","Type":"ContainerStarted","Data":"1e2f074ccc5cdbe2a27e8c4fceb136cc6cdaa159935bf0bde05320f24364a76c"} Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.010468 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerStarted","Data":"d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e"} Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.011359 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015236 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-config-data\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015300 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bae630fb-d96c-45df-abb1-d7913a06d4e6-log-httpd\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015336 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bae630fb-d96c-45df-abb1-d7913a06d4e6-etc-swift\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015352 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-combined-ca-bundle\") pod 
\"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015450 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bae630fb-d96c-45df-abb1-d7913a06d4e6-run-httpd\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015468 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88rh2\" (UniqueName: \"kubernetes.io/projected/bae630fb-d96c-45df-abb1-d7913a06d4e6-kube-api-access-88rh2\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015486 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-internal-tls-certs\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.015503 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-public-tls-certs\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.017788 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xsdlt" event={"ID":"2c5019e8-86d0-4324-bffc-70583983b377","Type":"ContainerDied","Data":"3c9e77c5dad8f4500a3b9dbc8c961dbe48f5b6cbc8b860f40dadd13c2893471c"} Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.017817 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c9e77c5dad8f4500a3b9dbc8c961dbe48f5b6cbc8b860f40dadd13c2893471c" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.017859 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-xsdlt" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.035980 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6545bbd774-g576b" podStartSLOduration=3.035959291 podStartE2EDuration="3.035959291s" podCreationTimestamp="2025-10-13 21:27:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:27:59.002234325 +0000 UTC m=+995.920479410" watchObservedRunningTime="2025-10-13 21:27:59.035959291 +0000 UTC m=+995.954204376" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.049304 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7bb46f45d-mgdw4" podStartSLOduration=3.838703051 podStartE2EDuration="7.049280835s" podCreationTimestamp="2025-10-13 21:27:52 +0000 UTC" firstStartedPulling="2025-10-13 21:27:54.440035169 +0000 UTC m=+991.358280254" lastFinishedPulling="2025-10-13 21:27:57.650612953 +0000 UTC m=+994.568858038" observedRunningTime="2025-10-13 21:27:59.036091544 +0000 UTC m=+995.954336629" watchObservedRunningTime="2025-10-13 21:27:59.049280835 +0000 UTC m=+995.967525920" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117578 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bae630fb-d96c-45df-abb1-d7913a06d4e6-run-httpd\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117655 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88rh2\" (UniqueName: \"kubernetes.io/projected/bae630fb-d96c-45df-abb1-d7913a06d4e6-kube-api-access-88rh2\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117673 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-internal-tls-certs\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117689 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-public-tls-certs\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117894 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-config-data\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117930 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bae630fb-d96c-45df-abb1-d7913a06d4e6-log-httpd\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " 
pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117959 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bae630fb-d96c-45df-abb1-d7913a06d4e6-etc-swift\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.117976 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-combined-ca-bundle\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.123194 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bae630fb-d96c-45df-abb1-d7913a06d4e6-run-httpd\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.124783 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bae630fb-d96c-45df-abb1-d7913a06d4e6-log-httpd\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.139432 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-public-tls-certs\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.149442 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-combined-ca-bundle\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.153322 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-internal-tls-certs\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.154547 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88rh2\" (UniqueName: \"kubernetes.io/projected/bae630fb-d96c-45df-abb1-d7913a06d4e6-kube-api-access-88rh2\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.154750 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae630fb-d96c-45df-abb1-d7913a06d4e6-config-data\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.165089 4689 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bae630fb-d96c-45df-abb1-d7913a06d4e6-etc-swift\") pod \"swift-proxy-5f54846cc7-sc4qr\" (UID: \"bae630fb-d96c-45df-abb1-d7913a06d4e6\") " pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.210098 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.481665401 podStartE2EDuration="11.210076853s" podCreationTimestamp="2025-10-13 21:27:48 +0000 UTC" firstStartedPulling="2025-10-13 21:27:49.88445996 +0000 UTC m=+986.802705045" lastFinishedPulling="2025-10-13 21:27:57.612871422 +0000 UTC m=+994.531116497" observedRunningTime="2025-10-13 21:27:59.096149703 +0000 UTC m=+996.014394778" watchObservedRunningTime="2025-10-13 21:27:59.210076853 +0000 UTC m=+996.128321938" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.211416 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.223714 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.235445 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-fcz4s" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.236332 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.236518 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.236716 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.250303 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.264955 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.338637 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rbn7\" (UniqueName: \"kubernetes.io/projected/8db5993f-1756-4551-bd90-dd15df0a1a49-kube-api-access-9rbn7\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.338944 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8db5993f-1756-4551-bd90-dd15df0a1a49-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.339034 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.339110 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.339189 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-scripts\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.339274 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.360683 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-bzrvq"] Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.434371 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8q7f6"] Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.436266 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.440780 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rbn7\" (UniqueName: \"kubernetes.io/projected/8db5993f-1756-4551-bd90-dd15df0a1a49-kube-api-access-9rbn7\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.440841 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8db5993f-1756-4551-bd90-dd15df0a1a49-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.440865 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.440889 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.440915 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-scripts\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.440960 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.445203 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.445278 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8db5993f-1756-4551-bd90-dd15df0a1a49-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.447450 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-scripts\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.452215 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.455392 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.469918 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rbn7\" (UniqueName: \"kubernetes.io/projected/8db5993f-1756-4551-bd90-dd15df0a1a49-kube-api-access-9rbn7\") pod \"cinder-scheduler-0\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.497426 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8q7f6"] Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.520035 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.521976 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.528897 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.540831 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.545213 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.545261 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.545297 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.545338 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.545378 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5krlr\" 
(UniqueName: \"kubernetes.io/projected/776abc4d-58f9-4028-ae6a-deeddad2a105-kube-api-access-5krlr\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.545410 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-config\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.605091 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.647913 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5krlr\" (UniqueName: \"kubernetes.io/projected/776abc4d-58f9-4028-ae6a-deeddad2a105-kube-api-access-5krlr\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.654141 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-config\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.654239 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-config\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.654479 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data-custom\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.654569 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.654654 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-scripts\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.654843 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-logs\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.654917 4689 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.655072 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.655158 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.655230 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.655272 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xxrk\" (UniqueName: \"kubernetes.io/projected/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-kube-api-access-7xxrk\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.655332 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.655422 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.656205 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.656983 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.657540 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.660670 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.673174 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5krlr\" (UniqueName: \"kubernetes.io/projected/776abc4d-58f9-4028-ae6a-deeddad2a105-kube-api-access-5krlr\") pod \"dnsmasq-dns-5c9776ccc5-8q7f6\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.758708 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data-custom\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.758764 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.758786 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-scripts\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.758847 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-logs\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.758868 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.758902 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.758937 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xxrk\" (UniqueName: \"kubernetes.io/projected/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-kube-api-access-7xxrk\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.761571 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.768183 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-logs\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.784542 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.787492 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.790445 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data-custom\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.791318 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.800601 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xxrk\" (UniqueName: \"kubernetes.io/projected/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-kube-api-access-7xxrk\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.850137 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-scripts\") pod \"cinder-api-0\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " pod="openstack/cinder-api-0" Oct 13 21:27:59 crc kubenswrapper[4689]: I1013 21:27:59.909532 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 13 21:28:00 crc kubenswrapper[4689]: I1013 21:28:00.090063 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" podUID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerName="dnsmasq-dns" containerID="cri-o://72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560" gracePeriod=10 Oct 13 21:28:00 crc kubenswrapper[4689]: I1013 21:28:00.169918 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f54846cc7-sc4qr"] Oct 13 21:28:00 crc kubenswrapper[4689]: W1013 21:28:00.210101 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbae630fb_d96c_45df_abb1_d7913a06d4e6.slice/crio-aaae3b8550d53a2e093ba51f35a5901aefbd34615e4530ed82999e3d27cd4598 WatchSource:0}: Error finding container aaae3b8550d53a2e093ba51f35a5901aefbd34615e4530ed82999e3d27cd4598: Status 404 returned error can't find the container with id aaae3b8550d53a2e093ba51f35a5901aefbd34615e4530ed82999e3d27cd4598 Oct 13 21:28:00 crc kubenswrapper[4689]: I1013 21:28:00.344757 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:28:00 crc kubenswrapper[4689]: W1013 21:28:00.407090 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8db5993f_1756_4551_bd90_dd15df0a1a49.slice/crio-a9f8d806968723c4d618377432e2280e94db66b63e30769425b3fd2074de70c2 WatchSource:0}: Error finding container a9f8d806968723c4d618377432e2280e94db66b63e30769425b3fd2074de70c2: Status 404 returned error can't find the container with id a9f8d806968723c4d618377432e2280e94db66b63e30769425b3fd2074de70c2 Oct 13 21:28:00 crc kubenswrapper[4689]: I1013 21:28:00.554859 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:00 crc kubenswrapper[4689]: I1013 21:28:00.816103 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8q7f6"] Oct 13 21:28:00 crc kubenswrapper[4689]: I1013 21:28:00.961137 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:28:00 crc kubenswrapper[4689]: I1013 21:28:00.965467 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:28:00 crc kubenswrapper[4689]: W1013 21:28:00.990251 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75cc0f0c_aa8d_4333_b83f_d0f740c8308c.slice/crio-7e6e984e3f461432ea4a7ac70822e7e40bb30e77c1f62458d5d2c367e92b2561 WatchSource:0}: Error finding container 7e6e984e3f461432ea4a7ac70822e7e40bb30e77c1f62458d5d2c367e92b2561: Status 404 returned error can't find the container with id 7e6e984e3f461432ea4a7ac70822e7e40bb30e77c1f62458d5d2c367e92b2561 Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.053198 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hljr9\" (UniqueName: \"kubernetes.io/projected/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-kube-api-access-hljr9\") pod \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.053272 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-sb\") pod \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.053302 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-swift-storage-0\") pod \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.053327 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-config\") pod \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.053354 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-svc\") pod \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.053501 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-nb\") pod \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\" (UID: \"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6\") " Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.083980 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-kube-api-access-hljr9" (OuterVolumeSpecName: "kube-api-access-hljr9") pod "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" (UID: "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6"). InnerVolumeSpecName "kube-api-access-hljr9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.140875 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8db5993f-1756-4551-bd90-dd15df0a1a49","Type":"ContainerStarted","Data":"a9f8d806968723c4d618377432e2280e94db66b63e30769425b3fd2074de70c2"} Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.144331 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" event={"ID":"776abc4d-58f9-4028-ae6a-deeddad2a105","Type":"ContainerStarted","Data":"5b6e55647b82db3f4e3c76bd0311918f605b2e933eb78fcbe1d13ecbc30af45b"} Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.147206 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75cc0f0c-aa8d-4333-b83f-d0f740c8308c","Type":"ContainerStarted","Data":"7e6e984e3f461432ea4a7ac70822e7e40bb30e77c1f62458d5d2c367e92b2561"} Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.150889 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" (UID: "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.153497 4689 generic.go:334] "Generic (PLEG): container finished" podID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerID="72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560" exitCode=0 Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.153721 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.153748 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" event={"ID":"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6","Type":"ContainerDied","Data":"72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560"} Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.155448 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-bzrvq" event={"ID":"ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6","Type":"ContainerDied","Data":"ea30ca8c70573855f18d5f400fe77e7e107fb714b887a8c4fa00e322e300513f"} Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.155721 4689 scope.go:117] "RemoveContainer" containerID="72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.156627 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hljr9\" (UniqueName: \"kubernetes.io/projected/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-kube-api-access-hljr9\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.156814 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.171137 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" (UID: "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.173887 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f54846cc7-sc4qr" event={"ID":"bae630fb-d96c-45df-abb1-d7913a06d4e6","Type":"ContainerStarted","Data":"5c06f72c27a33da1605bf7c941e7e38c94a00cf9fba4e83cf99bb50d5b3b7546"} Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.174632 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.174670 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.174682 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f54846cc7-sc4qr" event={"ID":"bae630fb-d96c-45df-abb1-d7913a06d4e6","Type":"ContainerStarted","Data":"aaae3b8550d53a2e093ba51f35a5901aefbd34615e4530ed82999e3d27cd4598"} Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.233730 4689 scope.go:117] "RemoveContainer" containerID="2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.235039 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-config" (OuterVolumeSpecName: "config") pod "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" (UID: "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.259332 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.261920 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.290220 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" (UID: "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.299990 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" (UID: "ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.313794 4689 scope.go:117] "RemoveContainer" containerID="72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560" Oct 13 21:28:01 crc kubenswrapper[4689]: E1013 21:28:01.316144 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560\": container with ID starting with 72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560 not found: ID does not exist" containerID="72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.316185 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560"} err="failed to get container status \"72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560\": rpc error: code = NotFound desc = could not find container \"72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560\": container with ID starting with 72461fc0747f336e032f182aec828354b5c1d61d25adbb53615e8dc1a3088560 not found: ID does not exist" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.316205 4689 scope.go:117] "RemoveContainer" containerID="2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb" Oct 13 21:28:01 crc kubenswrapper[4689]: E1013 21:28:01.321725 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb\": container with ID starting with 2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb not found: ID does not exist" containerID="2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.321765 4689 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb"} err="failed to get container status \"2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb\": rpc error: code = NotFound desc = could not find container \"2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb\": container with ID starting with 2fcdf794279b7aa765633613afa587666a0206c5c84ddadc4dee097b45f599eb not found: ID does not exist" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.365737 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.365806 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.514858 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5f54846cc7-sc4qr" podStartSLOduration=3.514829814 podStartE2EDuration="3.514829814s" podCreationTimestamp="2025-10-13 21:27:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:28:01.219194902 +0000 UTC m=+998.137439987" watchObservedRunningTime="2025-10-13 21:28:01.514829814 +0000 UTC m=+998.433074899" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.515488 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-bzrvq"] Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.542621 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-bzrvq"] Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.635557 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-795fd646bb-2s89l" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 13 21:28:01 crc kubenswrapper[4689]: I1013 21:28:01.883047 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" path="/var/lib/kubelet/pods/ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6/volumes" Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.028019 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.208898 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f54846cc7-sc4qr" event={"ID":"bae630fb-d96c-45df-abb1-d7913a06d4e6","Type":"ContainerStarted","Data":"7bd1f48e5ca168e5ae9d52a0cd173e851ae8b6a29b2c3d4f3c654ed9a47fe073"} Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.215651 4689 generic.go:334] "Generic (PLEG): container finished" podID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerID="c2b801a2882e90b44a263d807b9b70a7969920664fb97b229b1ccc0076266c69" exitCode=0 Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.215719 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" event={"ID":"776abc4d-58f9-4028-ae6a-deeddad2a105","Type":"ContainerDied","Data":"c2b801a2882e90b44a263d807b9b70a7969920664fb97b229b1ccc0076266c69"} 
Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.221189 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-central-agent" containerID="cri-o://6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953" gracePeriod=30 Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.221297 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75cc0f0c-aa8d-4333-b83f-d0f740c8308c","Type":"ContainerStarted","Data":"6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d"} Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.221355 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="proxy-httpd" containerID="cri-o://d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e" gracePeriod=30 Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.221393 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="sg-core" containerID="cri-o://b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a" gracePeriod=30 Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.221436 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-notification-agent" containerID="cri-o://115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6" gracePeriod=30 Oct 13 21:28:02 crc kubenswrapper[4689]: I1013 21:28:02.925829 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d89fff484-q9fvk" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.235411 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8db5993f-1756-4551-bd90-dd15df0a1a49","Type":"ContainerStarted","Data":"25b63955eba5787d1654fbadad61e2738b473b745a58b6863bfec581ea7aeaf1"} Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.238743 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" event={"ID":"776abc4d-58f9-4028-ae6a-deeddad2a105","Type":"ContainerStarted","Data":"ea88e9a0d54143830376b8346458f966c5ed103cccc1c6e0959c286f551529b7"} Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.239923 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.249367 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75cc0f0c-aa8d-4333-b83f-d0f740c8308c","Type":"ContainerStarted","Data":"e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67"} Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.249392 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerName="cinder-api-log" containerID="cri-o://6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d" gracePeriod=30 Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.249433 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" 
containerName="cinder-api" containerID="cri-o://e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67" gracePeriod=30 Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.249507 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.264391 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" podStartSLOduration=4.264371274 podStartE2EDuration="4.264371274s" podCreationTimestamp="2025-10-13 21:27:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:28:03.258373371 +0000 UTC m=+1000.176618456" watchObservedRunningTime="2025-10-13 21:28:03.264371274 +0000 UTC m=+1000.182616349" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.268955 4689 generic.go:334] "Generic (PLEG): container finished" podID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerID="d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e" exitCode=0 Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.268988 4689 generic.go:334] "Generic (PLEG): container finished" podID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerID="b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a" exitCode=2 Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.268996 4689 generic.go:334] "Generic (PLEG): container finished" podID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerID="115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6" exitCode=0 Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.269932 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerDied","Data":"d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e"} Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.269964 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerDied","Data":"b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a"} Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.269974 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerDied","Data":"115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6"} Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.321525 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d89fff484-q9fvk" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.343443 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.34342208 podStartE2EDuration="4.34342208s" podCreationTimestamp="2025-10-13 21:27:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:28:03.277097904 +0000 UTC m=+1000.195342989" watchObservedRunningTime="2025-10-13 21:28:03.34342208 +0000 UTC m=+1000.261667165" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.831182 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.939217 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-run-httpd\") pod \"e13bd179-aefc-4620-92fd-d4563d7b7f72\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.939523 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llmn6\" (UniqueName: \"kubernetes.io/projected/e13bd179-aefc-4620-92fd-d4563d7b7f72-kube-api-access-llmn6\") pod \"e13bd179-aefc-4620-92fd-d4563d7b7f72\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.939714 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-scripts\") pod \"e13bd179-aefc-4620-92fd-d4563d7b7f72\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.939902 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-log-httpd\") pod \"e13bd179-aefc-4620-92fd-d4563d7b7f72\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.939951 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e13bd179-aefc-4620-92fd-d4563d7b7f72" (UID: "e13bd179-aefc-4620-92fd-d4563d7b7f72"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.939978 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-sg-core-conf-yaml\") pod \"e13bd179-aefc-4620-92fd-d4563d7b7f72\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.940066 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-combined-ca-bundle\") pod \"e13bd179-aefc-4620-92fd-d4563d7b7f72\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.940148 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-config-data\") pod \"e13bd179-aefc-4620-92fd-d4563d7b7f72\" (UID: \"e13bd179-aefc-4620-92fd-d4563d7b7f72\") " Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.940931 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.949861 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e13bd179-aefc-4620-92fd-d4563d7b7f72" (UID: "e13bd179-aefc-4620-92fd-d4563d7b7f72"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.962497 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-scripts" (OuterVolumeSpecName: "scripts") pod "e13bd179-aefc-4620-92fd-d4563d7b7f72" (UID: "e13bd179-aefc-4620-92fd-d4563d7b7f72"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:03 crc kubenswrapper[4689]: I1013 21:28:03.978107 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e13bd179-aefc-4620-92fd-d4563d7b7f72-kube-api-access-llmn6" (OuterVolumeSpecName: "kube-api-access-llmn6") pod "e13bd179-aefc-4620-92fd-d4563d7b7f72" (UID: "e13bd179-aefc-4620-92fd-d4563d7b7f72"). InnerVolumeSpecName "kube-api-access-llmn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.042847 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llmn6\" (UniqueName: \"kubernetes.io/projected/e13bd179-aefc-4620-92fd-d4563d7b7f72-kube-api-access-llmn6\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.042878 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.042888 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e13bd179-aefc-4620-92fd-d4563d7b7f72-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.077483 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e13bd179-aefc-4620-92fd-d4563d7b7f72" (UID: "e13bd179-aefc-4620-92fd-d4563d7b7f72"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.094711 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e13bd179-aefc-4620-92fd-d4563d7b7f72" (UID: "e13bd179-aefc-4620-92fd-d4563d7b7f72"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.144913 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.144953 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.189809 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-config-data" (OuterVolumeSpecName: "config-data") pod "e13bd179-aefc-4620-92fd-d4563d7b7f72" (UID: "e13bd179-aefc-4620-92fd-d4563d7b7f72"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.247169 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e13bd179-aefc-4620-92fd-d4563d7b7f72-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.279991 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8db5993f-1756-4551-bd90-dd15df0a1a49","Type":"ContainerStarted","Data":"d270eefa85039c475c36cae95583532e1a1b3ec39abaa65aa36190af87820035"} Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.282993 4689 generic.go:334] "Generic (PLEG): container finished" podID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerID="6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d" exitCode=143 Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.283071 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75cc0f0c-aa8d-4333-b83f-d0f740c8308c","Type":"ContainerDied","Data":"6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d"} Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.286920 4689 generic.go:334] "Generic (PLEG): container finished" podID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerID="6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953" exitCode=0 Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.287455 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.288888 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerDied","Data":"6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953"} Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.288952 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e13bd179-aefc-4620-92fd-d4563d7b7f72","Type":"ContainerDied","Data":"e781d7e391e45be04fe0348717b96a851f890697a3d63575bf6f6006924fff21"} Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.288973 4689 scope.go:117] "RemoveContainer" containerID="d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.308329 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.173721503 podStartE2EDuration="5.308298008s" podCreationTimestamp="2025-10-13 21:27:59 +0000 UTC" firstStartedPulling="2025-10-13 21:28:00.410630627 +0000 UTC m=+997.328875702" lastFinishedPulling="2025-10-13 21:28:01.545207132 +0000 UTC m=+998.463452207" observedRunningTime="2025-10-13 21:28:04.301853966 +0000 UTC m=+1001.220099051" watchObservedRunningTime="2025-10-13 21:28:04.308298008 +0000 UTC m=+1001.226543093" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.332647 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.334130 4689 scope.go:117] "RemoveContainer" containerID="b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.343544 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 
21:28:04.371351 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.371726 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="sg-core" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.371738 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="sg-core" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.371751 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-notification-agent" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.371759 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-notification-agent" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.371766 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="proxy-httpd" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.371772 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="proxy-httpd" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.371782 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-central-agent" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.371788 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-central-agent" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.371803 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerName="init" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.371809 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerName="init" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.371837 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerName="dnsmasq-dns" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.371843 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerName="dnsmasq-dns" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.372000 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="proxy-httpd" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.372021 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-central-agent" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.372032 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="ceilometer-notification-agent" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.372047 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" containerName="sg-core" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.372056 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab8bc24e-29b3-4f3f-a0cc-12efaaeb9fb6" containerName="dnsmasq-dns" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 
21:28:04.374519 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.381360 4689 scope.go:117] "RemoveContainer" containerID="115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.386911 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.392226 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.394257 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.450136 4689 scope.go:117] "RemoveContainer" containerID="6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.452154 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.452243 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-config-data\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.452280 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.452301 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-run-httpd\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.452320 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-scripts\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.452389 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-log-httpd\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.452432 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzd7c\" (UniqueName: \"kubernetes.io/projected/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-kube-api-access-tzd7c\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " 
pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.533635 4689 scope.go:117] "RemoveContainer" containerID="d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.536885 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e\": container with ID starting with d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e not found: ID does not exist" containerID="d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.536957 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e"} err="failed to get container status \"d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e\": rpc error: code = NotFound desc = could not find container \"d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e\": container with ID starting with d91b235d6f812348df2ec93426773ce5f9b20da2e1653170b772b2181d51035e not found: ID does not exist" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.537006 4689 scope.go:117] "RemoveContainer" containerID="b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.541358 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a\": container with ID starting with b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a not found: ID does not exist" containerID="b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.541401 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a"} err="failed to get container status \"b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a\": rpc error: code = NotFound desc = could not find container \"b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a\": container with ID starting with b35e6fe22b04a2fb2030bc497b067a454e1944dd9e850d52d146d87f8e162b5a not found: ID does not exist" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.541452 4689 scope.go:117] "RemoveContainer" containerID="115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.542015 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6\": container with ID starting with 115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6 not found: ID does not exist" containerID="115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.542086 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6"} err="failed to get container status \"115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6\": rpc error: code = NotFound desc = could not find container 
\"115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6\": container with ID starting with 115a3de742bed40baecd3f8e0729b5403f4fc9e9bcf2ceaddc69705dff7e1eb6 not found: ID does not exist" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.542115 4689 scope.go:117] "RemoveContainer" containerID="6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953" Oct 13 21:28:04 crc kubenswrapper[4689]: E1013 21:28:04.542965 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953\": container with ID starting with 6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953 not found: ID does not exist" containerID="6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.542993 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953"} err="failed to get container status \"6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953\": rpc error: code = NotFound desc = could not find container \"6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953\": container with ID starting with 6c985bac0347f948e8801a1b28c777b0e021a335328f39d47991fef6fa76e953 not found: ID does not exist" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.553859 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-log-httpd\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.553957 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzd7c\" (UniqueName: \"kubernetes.io/projected/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-kube-api-access-tzd7c\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.554055 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.554112 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-config-data\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.554175 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.554202 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-run-httpd\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " 
pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.554247 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-scripts\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.555220 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-log-httpd\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.560118 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-scripts\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.561492 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-run-httpd\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.564393 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.564456 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-config-data\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.564839 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.573098 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzd7c\" (UniqueName: \"kubernetes.io/projected/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-kube-api-access-tzd7c\") pod \"ceilometer-0\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " pod="openstack/ceilometer-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.608002 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 13 21:28:04 crc kubenswrapper[4689]: I1013 21:28:04.702348 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:05 crc kubenswrapper[4689]: I1013 21:28:05.091068 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:05 crc kubenswrapper[4689]: I1013 21:28:05.230074 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:05 crc kubenswrapper[4689]: I1013 21:28:05.296393 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerStarted","Data":"c7c3a2eefef37e732cd170abcc1574d6f5a3a90eab6da9a3202f0c619d9277a4"} Oct 13 21:28:05 crc kubenswrapper[4689]: I1013 21:28:05.624066 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:28:05 crc kubenswrapper[4689]: I1013 21:28:05.625564 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:28:05 crc kubenswrapper[4689]: I1013 21:28:05.907461 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e13bd179-aefc-4620-92fd-d4563d7b7f72" path="/var/lib/kubelet/pods/e13bd179-aefc-4620-92fd-d4563d7b7f72/volumes" Oct 13 21:28:06 crc kubenswrapper[4689]: I1013 21:28:06.314747 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerStarted","Data":"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c"} Oct 13 21:28:08 crc kubenswrapper[4689]: I1013 21:28:08.431014 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:28:08 crc kubenswrapper[4689]: I1013 21:28:08.544857 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6545bbd774-g576b" Oct 13 21:28:08 crc kubenswrapper[4689]: I1013 21:28:08.620192 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-555854dc66-rf9mg"] Oct 13 21:28:08 crc kubenswrapper[4689]: I1013 21:28:08.620863 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-555854dc66-rf9mg" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api-log" containerID="cri-o://f52a6e503c9a7db1bafe202035f1e951b8462908eadb1de89f23ddfc24300c78" gracePeriod=30 Oct 13 21:28:08 crc kubenswrapper[4689]: I1013 21:28:08.620982 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-555854dc66-rf9mg" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api" containerID="cri-o://139efe3f65346100f078d6bc1bf4c709d269dee77a578778149c4f702314d58d" gracePeriod=30 Oct 13 21:28:09 crc kubenswrapper[4689]: E1013 21:28:09.120855 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82c5049d_0d61_4749_a025_85d112c1c9a4.slice/crio-conmon-f52a6e503c9a7db1bafe202035f1e951b8462908eadb1de89f23ddfc24300c78.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.257465 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.260379 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/swift-proxy-5f54846cc7-sc4qr" Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.379063 4689 generic.go:334] "Generic (PLEG): container finished" podID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerID="f52a6e503c9a7db1bafe202035f1e951b8462908eadb1de89f23ddfc24300c78" exitCode=143 Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.380426 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-555854dc66-rf9mg" event={"ID":"82c5049d-0d61-4749-a025-85d112c1c9a4","Type":"ContainerDied","Data":"f52a6e503c9a7db1bafe202035f1e951b8462908eadb1de89f23ddfc24300c78"} Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.793343 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.819416 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.924105 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-dgjln"] Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.924162 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:28:09 crc kubenswrapper[4689]: I1013 21:28:09.924402 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" containerName="dnsmasq-dns" containerID="cri-o://fa96089fee36a380541dfb45c9de73e9efc20c4d03bf05d7180ecce38cfbb864" gracePeriod=10 Oct 13 21:28:10 crc kubenswrapper[4689]: I1013 21:28:10.402161 4689 generic.go:334] "Generic (PLEG): container finished" podID="28327f70-7d3e-489e-8c99-85d7d4716534" containerID="fa96089fee36a380541dfb45c9de73e9efc20c4d03bf05d7180ecce38cfbb864" exitCode=0 Oct 13 21:28:10 crc kubenswrapper[4689]: I1013 21:28:10.402232 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" event={"ID":"28327f70-7d3e-489e-8c99-85d7d4716534","Type":"ContainerDied","Data":"fa96089fee36a380541dfb45c9de73e9efc20c4d03bf05d7180ecce38cfbb864"} Oct 13 21:28:10 crc kubenswrapper[4689]: I1013 21:28:10.402632 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="cinder-scheduler" containerID="cri-o://25b63955eba5787d1654fbadad61e2738b473b745a58b6863bfec581ea7aeaf1" gracePeriod=30 Oct 13 21:28:10 crc kubenswrapper[4689]: I1013 21:28:10.402665 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="probe" containerID="cri-o://d270eefa85039c475c36cae95583532e1a1b3ec39abaa65aa36190af87820035" gracePeriod=30 Oct 13 21:28:11 crc kubenswrapper[4689]: I1013 21:28:11.078883 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.139:5353: connect: connection refused" Oct 13 21:28:11 crc kubenswrapper[4689]: I1013 21:28:11.411686 4689 generic.go:334] "Generic (PLEG): container finished" podID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerID="d270eefa85039c475c36cae95583532e1a1b3ec39abaa65aa36190af87820035" exitCode=0 Oct 13 21:28:11 crc kubenswrapper[4689]: 
I1013 21:28:11.411729 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8db5993f-1756-4551-bd90-dd15df0a1a49","Type":"ContainerDied","Data":"d270eefa85039c475c36cae95583532e1a1b3ec39abaa65aa36190af87820035"} Oct 13 21:28:11 crc kubenswrapper[4689]: I1013 21:28:11.630240 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-795fd646bb-2s89l" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Oct 13 21:28:11 crc kubenswrapper[4689]: I1013 21:28:11.630363 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.064069 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.198119 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-tnlv4"] Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.200118 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-tnlv4" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.206095 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-tnlv4"] Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.283288 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-cv8cg"] Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.284396 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cv8cg" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.294934 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cv8cg"] Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.296310 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p786w\" (UniqueName: \"kubernetes.io/projected/cc382ef2-90d0-4dd4-89cb-23d5e4dd9327-kube-api-access-p786w\") pod \"nova-api-db-create-tnlv4\" (UID: \"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327\") " pod="openstack/nova-api-db-create-tnlv4" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.390087 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-kwxsd"] Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.392153 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-kwxsd" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.398540 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-767q9\" (UniqueName: \"kubernetes.io/projected/e882c36f-61b5-436d-ba2e-94f12bbb5010-kube-api-access-767q9\") pod \"nova-cell0-db-create-cv8cg\" (UID: \"e882c36f-61b5-436d-ba2e-94f12bbb5010\") " pod="openstack/nova-cell0-db-create-cv8cg" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.398611 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p786w\" (UniqueName: \"kubernetes.io/projected/cc382ef2-90d0-4dd4-89cb-23d5e4dd9327-kube-api-access-p786w\") pod \"nova-api-db-create-tnlv4\" (UID: \"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327\") " pod="openstack/nova-api-db-create-tnlv4" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.411659 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-kwxsd"] Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.430949 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p786w\" (UniqueName: \"kubernetes.io/projected/cc382ef2-90d0-4dd4-89cb-23d5e4dd9327-kube-api-access-p786w\") pod \"nova-api-db-create-tnlv4\" (UID: \"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327\") " pod="openstack/nova-api-db-create-tnlv4" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.436700 4689 generic.go:334] "Generic (PLEG): container finished" podID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerID="139efe3f65346100f078d6bc1bf4c709d269dee77a578778149c4f702314d58d" exitCode=0 Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.436744 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-555854dc66-rf9mg" event={"ID":"82c5049d-0d61-4749-a025-85d112c1c9a4","Type":"ContainerDied","Data":"139efe3f65346100f078d6bc1bf4c709d269dee77a578778149c4f702314d58d"} Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.500441 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4lbr\" (UniqueName: \"kubernetes.io/projected/132dcc8a-a3c1-4c02-9cd8-be28bf0e006b-kube-api-access-x4lbr\") pod \"nova-cell1-db-create-kwxsd\" (UID: \"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b\") " pod="openstack/nova-cell1-db-create-kwxsd" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.500532 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-767q9\" (UniqueName: \"kubernetes.io/projected/e882c36f-61b5-436d-ba2e-94f12bbb5010-kube-api-access-767q9\") pod \"nova-cell0-db-create-cv8cg\" (UID: \"e882c36f-61b5-436d-ba2e-94f12bbb5010\") " pod="openstack/nova-cell0-db-create-cv8cg" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.523620 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-767q9\" (UniqueName: \"kubernetes.io/projected/e882c36f-61b5-436d-ba2e-94f12bbb5010-kube-api-access-767q9\") pod \"nova-cell0-db-create-cv8cg\" (UID: \"e882c36f-61b5-436d-ba2e-94f12bbb5010\") " pod="openstack/nova-cell0-db-create-cv8cg" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.528507 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-tnlv4" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.602376 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4lbr\" (UniqueName: \"kubernetes.io/projected/132dcc8a-a3c1-4c02-9cd8-be28bf0e006b-kube-api-access-x4lbr\") pod \"nova-cell1-db-create-kwxsd\" (UID: \"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b\") " pod="openstack/nova-cell1-db-create-kwxsd" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.608423 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cv8cg" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.620345 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4lbr\" (UniqueName: \"kubernetes.io/projected/132dcc8a-a3c1-4c02-9cd8-be28bf0e006b-kube-api-access-x4lbr\") pod \"nova-cell1-db-create-kwxsd\" (UID: \"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b\") " pod="openstack/nova-cell1-db-create-kwxsd" Oct 13 21:28:12 crc kubenswrapper[4689]: I1013 21:28:12.719786 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-kwxsd" Oct 13 21:28:13 crc kubenswrapper[4689]: I1013 21:28:13.629246 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-555854dc66-rf9mg" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.164:9311/healthcheck\": dial tcp 10.217.0.164:9311: connect: connection refused" Oct 13 21:28:13 crc kubenswrapper[4689]: I1013 21:28:13.629388 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-555854dc66-rf9mg" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.164:9311/healthcheck\": dial tcp 10.217.0.164:9311: connect: connection refused" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.325861 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.375774 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.462812 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0\") pod \"28327f70-7d3e-489e-8c99-85d7d4716534\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.462899 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-config\") pod \"28327f70-7d3e-489e-8c99-85d7d4716534\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.462957 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-nb\") pod \"28327f70-7d3e-489e-8c99-85d7d4716534\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.463015 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g982t\" (UniqueName: \"kubernetes.io/projected/28327f70-7d3e-489e-8c99-85d7d4716534-kube-api-access-g982t\") pod \"28327f70-7d3e-489e-8c99-85d7d4716534\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.463403 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-svc\") pod \"28327f70-7d3e-489e-8c99-85d7d4716534\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.463446 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-sb\") pod \"28327f70-7d3e-489e-8c99-85d7d4716534\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.476666 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28327f70-7d3e-489e-8c99-85d7d4716534-kube-api-access-g982t" (OuterVolumeSpecName: "kube-api-access-g982t") pod "28327f70-7d3e-489e-8c99-85d7d4716534" (UID: "28327f70-7d3e-489e-8c99-85d7d4716534"). InnerVolumeSpecName "kube-api-access-g982t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.493528 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.495417 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-dgjln" event={"ID":"28327f70-7d3e-489e-8c99-85d7d4716534","Type":"ContainerDied","Data":"c3cee51d9c5b156a6ba4742b89c2515a272613ce71294b7e5952fbe023e28321"} Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.495484 4689 scope.go:117] "RemoveContainer" containerID="fa96089fee36a380541dfb45c9de73e9efc20c4d03bf05d7180ecce38cfbb864" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.496509 4689 generic.go:334] "Generic (PLEG): container finished" podID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerID="25b63955eba5787d1654fbadad61e2738b473b745a58b6863bfec581ea7aeaf1" exitCode=0 Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.496551 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8db5993f-1756-4551-bd90-dd15df0a1a49","Type":"ContainerDied","Data":"25b63955eba5787d1654fbadad61e2738b473b745a58b6863bfec581ea7aeaf1"} Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.501342 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerStarted","Data":"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578"} Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.506551 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-555854dc66-rf9mg" event={"ID":"82c5049d-0d61-4749-a025-85d112c1c9a4","Type":"ContainerDied","Data":"3f1f053189d9cd93d619291923a717a5816ac12dab93b2cb3a0ba7d94f81b3e5"} Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.506675 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-555854dc66-rf9mg" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.515359 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"10f0cb83-9cb9-48d1-8b9e-2217c48790d9","Type":"ContainerStarted","Data":"ef7aa89b4f89118ddf60542334eae7e34828809f6afab1b0c077d3b76c9efe98"} Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.522842 4689 scope.go:117] "RemoveContainer" containerID="bf132da6b42d038f48fa05919ce8e6d608c1c62a42bd3b7e30908a2c38fd99bd" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.540705 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "28327f70-7d3e-489e-8c99-85d7d4716534" (UID: "28327f70-7d3e-489e-8c99-85d7d4716534"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.541701 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.7159333 podStartE2EDuration="24.54168256s" podCreationTimestamp="2025-10-13 21:27:50 +0000 UTC" firstStartedPulling="2025-10-13 21:27:52.193079962 +0000 UTC m=+989.111325047" lastFinishedPulling="2025-10-13 21:28:14.018829222 +0000 UTC m=+1010.937074307" observedRunningTime="2025-10-13 21:28:14.534619623 +0000 UTC m=+1011.452864708" watchObservedRunningTime="2025-10-13 21:28:14.54168256 +0000 UTC m=+1011.459927645" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.551641 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "28327f70-7d3e-489e-8c99-85d7d4716534" (UID: "28327f70-7d3e-489e-8c99-85d7d4716534"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.558291 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-config" (OuterVolumeSpecName: "config") pod "28327f70-7d3e-489e-8c99-85d7d4716534" (UID: "28327f70-7d3e-489e-8c99-85d7d4716534"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565340 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "28327f70-7d3e-489e-8c99-85d7d4716534" (UID: "28327f70-7d3e-489e-8c99-85d7d4716534"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565364 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data\") pod \"82c5049d-0d61-4749-a025-85d112c1c9a4\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565539 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0\") pod \"28327f70-7d3e-489e-8c99-85d7d4716534\" (UID: \"28327f70-7d3e-489e-8c99-85d7d4716534\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565610 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-combined-ca-bundle\") pod \"82c5049d-0d61-4749-a025-85d112c1c9a4\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " Oct 13 21:28:14 crc kubenswrapper[4689]: W1013 21:28:14.565666 4689 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/28327f70-7d3e-489e-8c99-85d7d4716534/volumes/kubernetes.io~configmap/dns-swift-storage-0 Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565687 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "28327f70-7d3e-489e-8c99-85d7d4716534" (UID: "28327f70-7d3e-489e-8c99-85d7d4716534"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565764 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rd77\" (UniqueName: \"kubernetes.io/projected/82c5049d-0d61-4749-a025-85d112c1c9a4-kube-api-access-5rd77\") pod \"82c5049d-0d61-4749-a025-85d112c1c9a4\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565844 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data-custom\") pod \"82c5049d-0d61-4749-a025-85d112c1c9a4\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.565876 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82c5049d-0d61-4749-a025-85d112c1c9a4-logs\") pod \"82c5049d-0d61-4749-a025-85d112c1c9a4\" (UID: \"82c5049d-0d61-4749-a025-85d112c1c9a4\") " Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.566665 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82c5049d-0d61-4749-a025-85d112c1c9a4-logs" (OuterVolumeSpecName: "logs") pod "82c5049d-0d61-4749-a025-85d112c1c9a4" (UID: "82c5049d-0d61-4749-a025-85d112c1c9a4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.566939 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82c5049d-0d61-4749-a025-85d112c1c9a4-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.566958 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.566970 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.566978 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.566988 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g982t\" (UniqueName: \"kubernetes.io/projected/28327f70-7d3e-489e-8c99-85d7d4716534-kube-api-access-g982t\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.566997 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.569417 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "82c5049d-0d61-4749-a025-85d112c1c9a4" (UID: "82c5049d-0d61-4749-a025-85d112c1c9a4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.569723 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82c5049d-0d61-4749-a025-85d112c1c9a4-kube-api-access-5rd77" (OuterVolumeSpecName: "kube-api-access-5rd77") pod "82c5049d-0d61-4749-a025-85d112c1c9a4" (UID: "82c5049d-0d61-4749-a025-85d112c1c9a4"). InnerVolumeSpecName "kube-api-access-5rd77". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.576891 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "28327f70-7d3e-489e-8c99-85d7d4716534" (UID: "28327f70-7d3e-489e-8c99-85d7d4716534"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.577130 4689 scope.go:117] "RemoveContainer" containerID="139efe3f65346100f078d6bc1bf4c709d269dee77a578778149c4f702314d58d" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.584631 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-kwxsd"] Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.600818 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "82c5049d-0d61-4749-a025-85d112c1c9a4" (UID: "82c5049d-0d61-4749-a025-85d112c1c9a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.610436 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-tnlv4"] Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.635026 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data" (OuterVolumeSpecName: "config-data") pod "82c5049d-0d61-4749-a025-85d112c1c9a4" (UID: "82c5049d-0d61-4749-a025-85d112c1c9a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.638016 4689 scope.go:117] "RemoveContainer" containerID="f52a6e503c9a7db1bafe202035f1e951b8462908eadb1de89f23ddfc24300c78" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.669120 4689 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.669160 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.669173 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82c5049d-0d61-4749-a025-85d112c1c9a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.669184 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/28327f70-7d3e-489e-8c99-85d7d4716534-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.669195 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rd77\" (UniqueName: \"kubernetes.io/projected/82c5049d-0d61-4749-a025-85d112c1c9a4-kube-api-access-5rd77\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.749550 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cv8cg"] Oct 13 21:28:14 crc kubenswrapper[4689]: W1013 21:28:14.756462 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode882c36f_61b5_436d_ba2e_94f12bbb5010.slice/crio-bba3b31dfbe949043f6d6bb950bfb939cb4381a89c5807547311e9568fd7ba07 WatchSource:0}: Error finding container bba3b31dfbe949043f6d6bb950bfb939cb4381a89c5807547311e9568fd7ba07: Status 404 returned 
error can't find the container with id bba3b31dfbe949043f6d6bb950bfb939cb4381a89c5807547311e9568fd7ba07 Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.834345 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-dgjln"] Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.843876 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-dgjln"] Oct 13 21:28:14 crc kubenswrapper[4689]: I1013 21:28:14.975074 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-86dc7995bd-76xtf" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.204791 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-555854dc66-rf9mg"] Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.214722 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.216868 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-555854dc66-rf9mg"] Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.379814 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data-custom\") pod \"8db5993f-1756-4551-bd90-dd15df0a1a49\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.379918 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data\") pod \"8db5993f-1756-4551-bd90-dd15df0a1a49\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.380005 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-combined-ca-bundle\") pod \"8db5993f-1756-4551-bd90-dd15df0a1a49\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.380043 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-scripts\") pod \"8db5993f-1756-4551-bd90-dd15df0a1a49\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.380089 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8db5993f-1756-4551-bd90-dd15df0a1a49-etc-machine-id\") pod \"8db5993f-1756-4551-bd90-dd15df0a1a49\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.380146 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rbn7\" (UniqueName: \"kubernetes.io/projected/8db5993f-1756-4551-bd90-dd15df0a1a49-kube-api-access-9rbn7\") pod \"8db5993f-1756-4551-bd90-dd15df0a1a49\" (UID: \"8db5993f-1756-4551-bd90-dd15df0a1a49\") " Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.382956 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8db5993f-1756-4551-bd90-dd15df0a1a49-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8db5993f-1756-4551-bd90-dd15df0a1a49" (UID: 
"8db5993f-1756-4551-bd90-dd15df0a1a49"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.387834 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8db5993f-1756-4551-bd90-dd15df0a1a49" (UID: "8db5993f-1756-4551-bd90-dd15df0a1a49"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.387877 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8db5993f-1756-4551-bd90-dd15df0a1a49-kube-api-access-9rbn7" (OuterVolumeSpecName: "kube-api-access-9rbn7") pod "8db5993f-1756-4551-bd90-dd15df0a1a49" (UID: "8db5993f-1756-4551-bd90-dd15df0a1a49"). InnerVolumeSpecName "kube-api-access-9rbn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.392429 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-scripts" (OuterVolumeSpecName: "scripts") pod "8db5993f-1756-4551-bd90-dd15df0a1a49" (UID: "8db5993f-1756-4551-bd90-dd15df0a1a49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.453101 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8db5993f-1756-4551-bd90-dd15df0a1a49" (UID: "8db5993f-1756-4551-bd90-dd15df0a1a49"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.481832 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rbn7\" (UniqueName: \"kubernetes.io/projected/8db5993f-1756-4551-bd90-dd15df0a1a49-kube-api-access-9rbn7\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.481870 4689 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.481879 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.481889 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.481897 4689 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8db5993f-1756-4551-bd90-dd15df0a1a49-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.493552 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data" (OuterVolumeSpecName: "config-data") pod "8db5993f-1756-4551-bd90-dd15df0a1a49" (UID: "8db5993f-1756-4551-bd90-dd15df0a1a49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.523687 4689 generic.go:334] "Generic (PLEG): container finished" podID="132dcc8a-a3c1-4c02-9cd8-be28bf0e006b" containerID="ebf8c306b50e29cbcf0346c0e80129c61860578949d004d3973700d20dd7a837" exitCode=0 Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.523743 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-kwxsd" event={"ID":"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b","Type":"ContainerDied","Data":"ebf8c306b50e29cbcf0346c0e80129c61860578949d004d3973700d20dd7a837"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.524001 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-kwxsd" event={"ID":"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b","Type":"ContainerStarted","Data":"eab3ddb2145ac15daf0231d4463f5e8cbc02c2c92a689a536a4ae1495ee8b72a"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.526741 4689 generic.go:334] "Generic (PLEG): container finished" podID="e882c36f-61b5-436d-ba2e-94f12bbb5010" containerID="4d1b81dc38686bdd5314eb45f9b87aacca3c98b674c78e1c0417860ace12fe48" exitCode=0 Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.526812 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cv8cg" event={"ID":"e882c36f-61b5-436d-ba2e-94f12bbb5010","Type":"ContainerDied","Data":"4d1b81dc38686bdd5314eb45f9b87aacca3c98b674c78e1c0417860ace12fe48"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.526831 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cv8cg" 
event={"ID":"e882c36f-61b5-436d-ba2e-94f12bbb5010","Type":"ContainerStarted","Data":"bba3b31dfbe949043f6d6bb950bfb939cb4381a89c5807547311e9568fd7ba07"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.528253 4689 generic.go:334] "Generic (PLEG): container finished" podID="cc382ef2-90d0-4dd4-89cb-23d5e4dd9327" containerID="2beb6bf6d4d5601040839fdeaa62b18849de0f930f1dca4c2b8a47ccdf67c4b0" exitCode=0 Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.528288 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-tnlv4" event={"ID":"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327","Type":"ContainerDied","Data":"2beb6bf6d4d5601040839fdeaa62b18849de0f930f1dca4c2b8a47ccdf67c4b0"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.528328 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-tnlv4" event={"ID":"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327","Type":"ContainerStarted","Data":"0b7112c2556f8b7bd36c5ffd9a273b48799019158e1e7515dbf487dc09e000ae"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.530920 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8db5993f-1756-4551-bd90-dd15df0a1a49","Type":"ContainerDied","Data":"a9f8d806968723c4d618377432e2280e94db66b63e30769425b3fd2074de70c2"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.530944 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.530962 4689 scope.go:117] "RemoveContainer" containerID="d270eefa85039c475c36cae95583532e1a1b3ec39abaa65aa36190af87820035" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.534308 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerStarted","Data":"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3"} Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.551475 4689 scope.go:117] "RemoveContainer" containerID="25b63955eba5787d1654fbadad61e2738b473b745a58b6863bfec581ea7aeaf1" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.578428 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.583635 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8db5993f-1756-4551-bd90-dd15df0a1a49-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.586976 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.598637 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:28:15 crc kubenswrapper[4689]: E1013 21:28:15.598990 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" containerName="dnsmasq-dns" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599007 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" containerName="dnsmasq-dns" Oct 13 21:28:15 crc kubenswrapper[4689]: E1013 21:28:15.599021 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="cinder-scheduler" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599030 4689 
state_mem.go:107] "Deleted CPUSet assignment" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="cinder-scheduler" Oct 13 21:28:15 crc kubenswrapper[4689]: E1013 21:28:15.599041 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="probe" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599047 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="probe" Oct 13 21:28:15 crc kubenswrapper[4689]: E1013 21:28:15.599059 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599067 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api" Oct 13 21:28:15 crc kubenswrapper[4689]: E1013 21:28:15.599086 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" containerName="init" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599091 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" containerName="init" Oct 13 21:28:15 crc kubenswrapper[4689]: E1013 21:28:15.599115 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api-log" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599122 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api-log" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599301 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599319 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" containerName="dnsmasq-dns" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599333 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="cinder-scheduler" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599347 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" containerName="probe" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.599362 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" containerName="barbican-api-log" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.600262 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.606836 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.653620 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.787057 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.787640 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.787710 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.787901 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.788068 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.788115 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd7b8\" (UniqueName: \"kubernetes.io/projected/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-kube-api-access-vd7b8\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.878796 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28327f70-7d3e-489e-8c99-85d7d4716534" path="/var/lib/kubelet/pods/28327f70-7d3e-489e-8c99-85d7d4716534/volumes" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.879711 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82c5049d-0d61-4749-a025-85d112c1c9a4" path="/var/lib/kubelet/pods/82c5049d-0d61-4749-a025-85d112c1c9a4/volumes" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.880311 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8db5993f-1756-4551-bd90-dd15df0a1a49" path="/var/lib/kubelet/pods/8db5993f-1756-4551-bd90-dd15df0a1a49/volumes" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.889613 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.889675 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.889713 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.889767 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.889798 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd7b8\" (UniqueName: \"kubernetes.io/projected/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-kube-api-access-vd7b8\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.889826 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.889854 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.895939 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.896231 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.897097 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " 
pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.898378 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.916282 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd7b8\" (UniqueName: \"kubernetes.io/projected/0d79575d-670f-47b0-83a6-9c2b36f8ffd0-kube-api-access-vd7b8\") pod \"cinder-scheduler-0\" (UID: \"0d79575d-670f-47b0-83a6-9c2b36f8ffd0\") " pod="openstack/cinder-scheduler-0" Oct 13 21:28:15 crc kubenswrapper[4689]: I1013 21:28:15.927082 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 13 21:28:16 crc kubenswrapper[4689]: I1013 21:28:16.216921 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:28:16 crc kubenswrapper[4689]: I1013 21:28:16.224460 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-log" containerID="cri-o://4a838a606986f7a7b6fbde47b397b75c844d3997c4c66c8fb463b4c7075d80e1" gracePeriod=30 Oct 13 21:28:16 crc kubenswrapper[4689]: I1013 21:28:16.224631 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-httpd" containerID="cri-o://1298847ffef436f01cca433afe39c7766f3cd21da136c5e98d34689a544c66d9" gracePeriod=30 Oct 13 21:28:16 crc kubenswrapper[4689]: I1013 21:28:16.438156 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 13 21:28:16 crc kubenswrapper[4689]: I1013 21:28:16.561214 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d79575d-670f-47b0-83a6-9c2b36f8ffd0","Type":"ContainerStarted","Data":"6089a412ff33649372f559b0233934c54679145e338c1d33e2a579b40e867e1e"} Oct 13 21:28:16 crc kubenswrapper[4689]: I1013 21:28:16.564983 4689 generic.go:334] "Generic (PLEG): container finished" podID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerID="4a838a606986f7a7b6fbde47b397b75c844d3997c4c66c8fb463b4c7075d80e1" exitCode=143 Oct 13 21:28:16 crc kubenswrapper[4689]: I1013 21:28:16.565091 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0bc4829-b428-4d71-aeea-719aa026dcc0","Type":"ContainerDied","Data":"4a838a606986f7a7b6fbde47b397b75c844d3997c4c66c8fb463b4c7075d80e1"} Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.103071 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-tnlv4" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.222147 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p786w\" (UniqueName: \"kubernetes.io/projected/cc382ef2-90d0-4dd4-89cb-23d5e4dd9327-kube-api-access-p786w\") pod \"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327\" (UID: \"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.231951 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc382ef2-90d0-4dd4-89cb-23d5e4dd9327-kube-api-access-p786w" (OuterVolumeSpecName: "kube-api-access-p786w") pod "cc382ef2-90d0-4dd4-89cb-23d5e4dd9327" (UID: "cc382ef2-90d0-4dd4-89cb-23d5e4dd9327"). InnerVolumeSpecName "kube-api-access-p786w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.270122 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-9b98684c9-9h5ml" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.349574 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p786w\" (UniqueName: \"kubernetes.io/projected/cc382ef2-90d0-4dd4-89cb-23d5e4dd9327-kube-api-access-p786w\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.359114 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-kwxsd" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.379969 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-86dc7995bd-76xtf"] Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.380185 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-86dc7995bd-76xtf" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-api" containerID="cri-o://37d73f863fb58767978048393ee9d0ff0b55db0ac178ce34413ea96fce4a44b8" gracePeriod=30 Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.380294 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-86dc7995bd-76xtf" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-httpd" containerID="cri-o://3d9c65c1bce1f25279f529b62b6ec8e12128ae3528a65493d767422116dd67e9" gracePeriod=30 Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.417972 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.430975 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-cv8cg" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.451450 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.452384 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-log" containerID="cri-o://0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35" gracePeriod=30 Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.452529 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-httpd" containerID="cri-o://1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5" gracePeriod=30 Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556245 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg8t2\" (UniqueName: \"kubernetes.io/projected/fd448066-1b70-4e35-959c-5c702d87560f-kube-api-access-mg8t2\") pod \"fd448066-1b70-4e35-959c-5c702d87560f\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556293 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-config-data\") pod \"fd448066-1b70-4e35-959c-5c702d87560f\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556314 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-scripts\") pod \"fd448066-1b70-4e35-959c-5c702d87560f\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556437 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-767q9\" (UniqueName: \"kubernetes.io/projected/e882c36f-61b5-436d-ba2e-94f12bbb5010-kube-api-access-767q9\") pod \"e882c36f-61b5-436d-ba2e-94f12bbb5010\" (UID: \"e882c36f-61b5-436d-ba2e-94f12bbb5010\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556488 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4lbr\" (UniqueName: \"kubernetes.io/projected/132dcc8a-a3c1-4c02-9cd8-be28bf0e006b-kube-api-access-x4lbr\") pod \"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b\" (UID: \"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556521 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd448066-1b70-4e35-959c-5c702d87560f-logs\") pod \"fd448066-1b70-4e35-959c-5c702d87560f\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556570 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-combined-ca-bundle\") pod \"fd448066-1b70-4e35-959c-5c702d87560f\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556610 4689 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-tls-certs\") pod \"fd448066-1b70-4e35-959c-5c702d87560f\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.556662 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-secret-key\") pod \"fd448066-1b70-4e35-959c-5c702d87560f\" (UID: \"fd448066-1b70-4e35-959c-5c702d87560f\") " Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.560058 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd448066-1b70-4e35-959c-5c702d87560f-logs" (OuterVolumeSpecName: "logs") pod "fd448066-1b70-4e35-959c-5c702d87560f" (UID: "fd448066-1b70-4e35-959c-5c702d87560f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.574523 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd448066-1b70-4e35-959c-5c702d87560f-kube-api-access-mg8t2" (OuterVolumeSpecName: "kube-api-access-mg8t2") pod "fd448066-1b70-4e35-959c-5c702d87560f" (UID: "fd448066-1b70-4e35-959c-5c702d87560f"). InnerVolumeSpecName "kube-api-access-mg8t2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.577792 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/132dcc8a-a3c1-4c02-9cd8-be28bf0e006b-kube-api-access-x4lbr" (OuterVolumeSpecName: "kube-api-access-x4lbr") pod "132dcc8a-a3c1-4c02-9cd8-be28bf0e006b" (UID: "132dcc8a-a3c1-4c02-9cd8-be28bf0e006b"). InnerVolumeSpecName "kube-api-access-x4lbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.580994 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fd448066-1b70-4e35-959c-5c702d87560f" (UID: "fd448066-1b70-4e35-959c-5c702d87560f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.581190 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e882c36f-61b5-436d-ba2e-94f12bbb5010-kube-api-access-767q9" (OuterVolumeSpecName: "kube-api-access-767q9") pod "e882c36f-61b5-436d-ba2e-94f12bbb5010" (UID: "e882c36f-61b5-436d-ba2e-94f12bbb5010"). InnerVolumeSpecName "kube-api-access-767q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.635615 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-config-data" (OuterVolumeSpecName: "config-data") pod "fd448066-1b70-4e35-959c-5c702d87560f" (UID: "fd448066-1b70-4e35-959c-5c702d87560f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.641182 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-kwxsd" event={"ID":"132dcc8a-a3c1-4c02-9cd8-be28bf0e006b","Type":"ContainerDied","Data":"eab3ddb2145ac15daf0231d4463f5e8cbc02c2c92a689a536a4ae1495ee8b72a"} Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.641240 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eab3ddb2145ac15daf0231d4463f5e8cbc02c2c92a689a536a4ae1495ee8b72a" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.641314 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-kwxsd" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.652749 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd448066-1b70-4e35-959c-5c702d87560f" (UID: "fd448066-1b70-4e35-959c-5c702d87560f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.656339 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cv8cg" event={"ID":"e882c36f-61b5-436d-ba2e-94f12bbb5010","Type":"ContainerDied","Data":"bba3b31dfbe949043f6d6bb950bfb939cb4381a89c5807547311e9568fd7ba07"} Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.656379 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bba3b31dfbe949043f6d6bb950bfb939cb4381a89c5807547311e9568fd7ba07" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.656436 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-cv8cg" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.658596 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-767q9\" (UniqueName: \"kubernetes.io/projected/e882c36f-61b5-436d-ba2e-94f12bbb5010-kube-api-access-767q9\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.658628 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4lbr\" (UniqueName: \"kubernetes.io/projected/132dcc8a-a3c1-4c02-9cd8-be28bf0e006b-kube-api-access-x4lbr\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.658640 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd448066-1b70-4e35-959c-5c702d87560f-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.658652 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.658663 4689 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.658675 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg8t2\" (UniqueName: \"kubernetes.io/projected/fd448066-1b70-4e35-959c-5c702d87560f-kube-api-access-mg8t2\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.658683 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.664438 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-scripts" (OuterVolumeSpecName: "scripts") pod "fd448066-1b70-4e35-959c-5c702d87560f" (UID: "fd448066-1b70-4e35-959c-5c702d87560f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.665055 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-tnlv4" event={"ID":"cc382ef2-90d0-4dd4-89cb-23d5e4dd9327","Type":"ContainerDied","Data":"0b7112c2556f8b7bd36c5ffd9a273b48799019158e1e7515dbf487dc09e000ae"} Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.665109 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b7112c2556f8b7bd36c5ffd9a273b48799019158e1e7515dbf487dc09e000ae" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.665191 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-tnlv4" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.686525 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "fd448066-1b70-4e35-959c-5c702d87560f" (UID: "fd448066-1b70-4e35-959c-5c702d87560f"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.706862 4689 generic.go:334] "Generic (PLEG): container finished" podID="fd448066-1b70-4e35-959c-5c702d87560f" containerID="0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e" exitCode=137 Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.707019 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-795fd646bb-2s89l" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.708359 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-795fd646bb-2s89l" event={"ID":"fd448066-1b70-4e35-959c-5c702d87560f","Type":"ContainerDied","Data":"0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e"} Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.708403 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-795fd646bb-2s89l" event={"ID":"fd448066-1b70-4e35-959c-5c702d87560f","Type":"ContainerDied","Data":"ce4aef8ec666f4112fbee29d2094287992ad575dfa2dcb5e6137937f6269eaf0"} Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.708429 4689 scope.go:117] "RemoveContainer" containerID="5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.726066 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d79575d-670f-47b0-83a6-9c2b36f8ffd0","Type":"ContainerStarted","Data":"6a6eee7ccb27adeb3856291aa0938bf8dd171a33b4648b167f626f97295bf1a1"} Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.749449 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-795fd646bb-2s89l"] Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.757542 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-795fd646bb-2s89l"] Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.762672 4689 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd448066-1b70-4e35-959c-5c702d87560f-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.762699 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd448066-1b70-4e35-959c-5c702d87560f-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.894441 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd448066-1b70-4e35-959c-5c702d87560f" path="/var/lib/kubelet/pods/fd448066-1b70-4e35-959c-5c702d87560f/volumes" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.912835 4689 scope.go:117] "RemoveContainer" containerID="0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.958308 4689 scope.go:117] "RemoveContainer" containerID="5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83" Oct 13 21:28:17 crc kubenswrapper[4689]: E1013 21:28:17.958755 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83\": container with ID starting with 5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83 not found: ID does not exist" containerID="5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83" Oct 13 21:28:17 crc 
kubenswrapper[4689]: I1013 21:28:17.958791 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83"} err="failed to get container status \"5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83\": rpc error: code = NotFound desc = could not find container \"5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83\": container with ID starting with 5d810c468ab93d2b6a2b55e29dbd0f0877bd15323cdf71fdbf9ad1a0b002bf83 not found: ID does not exist" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.958813 4689 scope.go:117] "RemoveContainer" containerID="0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e" Oct 13 21:28:17 crc kubenswrapper[4689]: E1013 21:28:17.959101 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e\": container with ID starting with 0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e not found: ID does not exist" containerID="0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e" Oct 13 21:28:17 crc kubenswrapper[4689]: I1013 21:28:17.959125 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e"} err="failed to get container status \"0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e\": rpc error: code = NotFound desc = could not find container \"0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e\": container with ID starting with 0508f3a0b3e63cabdacf7e4be5cf5ff224833a9698433874f6b8ed378530891e not found: ID does not exist" Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.759028 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerStarted","Data":"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599"} Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.759431 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.759281 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="proxy-httpd" containerID="cri-o://cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" gracePeriod=30 Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.759246 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="ceilometer-notification-agent" containerID="cri-o://6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" gracePeriod=30 Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.759318 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="sg-core" containerID="cri-o://4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" gracePeriod=30 Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.759335 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" 
containerName="ceilometer-central-agent" containerID="cri-o://4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" gracePeriod=30 Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.777627 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d79575d-670f-47b0-83a6-9c2b36f8ffd0","Type":"ContainerStarted","Data":"c53e5a7ae7b65c9a5abe122fb9387cafe26da546b0caba74718dc1318bd23234"} Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.801690 4689 generic.go:334] "Generic (PLEG): container finished" podID="519df139-a232-4218-a9ba-4d626fe3d115" containerID="0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35" exitCode=143 Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.802055 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"519df139-a232-4218-a9ba-4d626fe3d115","Type":"ContainerDied","Data":"0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35"} Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.810473 4689 generic.go:334] "Generic (PLEG): container finished" podID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerID="3d9c65c1bce1f25279f529b62b6ec8e12128ae3528a65493d767422116dd67e9" exitCode=0 Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.810777 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86dc7995bd-76xtf" event={"ID":"140a26a7-0308-4d50-b2ec-d2e55be6b812","Type":"ContainerDied","Data":"3d9c65c1bce1f25279f529b62b6ec8e12128ae3528a65493d767422116dd67e9"} Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.812790 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.846756882 podStartE2EDuration="14.8127686s" podCreationTimestamp="2025-10-13 21:28:04 +0000 UTC" firstStartedPulling="2025-10-13 21:28:05.235811593 +0000 UTC m=+1002.154056678" lastFinishedPulling="2025-10-13 21:28:18.201823311 +0000 UTC m=+1015.120068396" observedRunningTime="2025-10-13 21:28:18.797645353 +0000 UTC m=+1015.715890438" watchObservedRunningTime="2025-10-13 21:28:18.8127686 +0000 UTC m=+1015.731013685" Oct 13 21:28:18 crc kubenswrapper[4689]: I1013 21:28:18.824738 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.824721892 podStartE2EDuration="3.824721892s" podCreationTimestamp="2025-10-13 21:28:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:28:18.822954291 +0000 UTC m=+1015.741199376" watchObservedRunningTime="2025-10-13 21:28:18.824721892 +0000 UTC m=+1015.742966967" Oct 13 21:28:19 crc kubenswrapper[4689]: E1013 21:28:19.486964 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1335c6bc_6a9e_42c9_a7a4_c01b1a340803.slice/crio-conmon-4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1335c6bc_6a9e_42c9_a7a4_c01b1a340803.slice/crio-conmon-6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1335c6bc_6a9e_42c9_a7a4_c01b1a340803.slice/crio-6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0bc4829_b428_4d71_aeea_719aa026dcc0.slice/crio-conmon-1298847ffef436f01cca433afe39c7766f3cd21da136c5e98d34689a544c66d9.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.803217 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.852191 4689 generic.go:334] "Generic (PLEG): container finished" podID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerID="1298847ffef436f01cca433afe39c7766f3cd21da136c5e98d34689a544c66d9" exitCode=0 Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.852262 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0bc4829-b428-4d71-aeea-719aa026dcc0","Type":"ContainerDied","Data":"1298847ffef436f01cca433afe39c7766f3cd21da136c5e98d34689a544c66d9"} Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.861053 4689 generic.go:334] "Generic (PLEG): container finished" podID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerID="cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" exitCode=0 Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.861094 4689 generic.go:334] "Generic (PLEG): container finished" podID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerID="4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" exitCode=2 Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.861103 4689 generic.go:334] "Generic (PLEG): container finished" podID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerID="6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" exitCode=0 Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.861112 4689 generic.go:334] "Generic (PLEG): container finished" podID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerID="4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" exitCode=0 Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.862151 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.862753 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerDied","Data":"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599"} Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.862780 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerDied","Data":"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3"} Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.862794 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerDied","Data":"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578"} Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.862805 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerDied","Data":"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c"} Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.862816 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1335c6bc-6a9e-42c9-a7a4-c01b1a340803","Type":"ContainerDied","Data":"c7c3a2eefef37e732cd170abcc1574d6f5a3a90eab6da9a3202f0c619d9277a4"} Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.862833 4689 scope.go:117] "RemoveContainer" containerID="cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.901814 4689 scope.go:117] "RemoveContainer" containerID="4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.923517 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-sg-core-conf-yaml\") pod \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.923602 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-combined-ca-bundle\") pod \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.923624 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-run-httpd\") pod \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.923670 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-scripts\") pod \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.923711 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzd7c\" (UniqueName: 
\"kubernetes.io/projected/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-kube-api-access-tzd7c\") pod \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.923743 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-config-data\") pod \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.923841 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-log-httpd\") pod \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\" (UID: \"1335c6bc-6a9e-42c9-a7a4-c01b1a340803\") " Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.930832 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1335c6bc-6a9e-42c9-a7a4-c01b1a340803" (UID: "1335c6bc-6a9e-42c9-a7a4-c01b1a340803"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:19 crc kubenswrapper[4689]: I1013 21:28:19.940896 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1335c6bc-6a9e-42c9-a7a4-c01b1a340803" (UID: "1335c6bc-6a9e-42c9-a7a4-c01b1a340803"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.000312 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-scripts" (OuterVolumeSpecName: "scripts") pod "1335c6bc-6a9e-42c9-a7a4-c01b1a340803" (UID: "1335c6bc-6a9e-42c9-a7a4-c01b1a340803"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.001191 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-kube-api-access-tzd7c" (OuterVolumeSpecName: "kube-api-access-tzd7c") pod "1335c6bc-6a9e-42c9-a7a4-c01b1a340803" (UID: "1335c6bc-6a9e-42c9-a7a4-c01b1a340803"). InnerVolumeSpecName "kube-api-access-tzd7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.001369 4689 scope.go:117] "RemoveContainer" containerID="6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.009834 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1335c6bc-6a9e-42c9-a7a4-c01b1a340803" (UID: "1335c6bc-6a9e-42c9-a7a4-c01b1a340803"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.025970 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.033021 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.033237 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.033304 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzd7c\" (UniqueName: \"kubernetes.io/projected/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-kube-api-access-tzd7c\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.033367 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.150165 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1335c6bc-6a9e-42c9-a7a4-c01b1a340803" (UID: "1335c6bc-6a9e-42c9-a7a4-c01b1a340803"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.154497 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-config-data" (OuterVolumeSpecName: "config-data") pod "1335c6bc-6a9e-42c9-a7a4-c01b1a340803" (UID: "1335c6bc-6a9e-42c9-a7a4-c01b1a340803"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.162372 4689 scope.go:117] "RemoveContainer" containerID="4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.227230 4689 scope.go:117] "RemoveContainer" containerID="cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.244107 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.244153 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1335c6bc-6a9e-42c9-a7a4-c01b1a340803-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.251281 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.251514 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": container with ID starting with cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599 not found: ID does not exist" containerID="cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.251570 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599"} err="failed to get container status \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": rpc error: code = NotFound desc = could not find container \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": container with ID starting with cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.251626 4689 scope.go:117] "RemoveContainer" containerID="4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.255693 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": container with ID starting with 4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3 not found: ID does not exist" containerID="4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.255720 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3"} err="failed to get container status \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": rpc error: code = NotFound desc = could not find container \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": container with ID starting with 4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.255748 4689 scope.go:117] "RemoveContainer" 
containerID="6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.259919 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": container with ID starting with 6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578 not found: ID does not exist" containerID="6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.259957 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578"} err="failed to get container status \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": rpc error: code = NotFound desc = could not find container \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": container with ID starting with 6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.259974 4689 scope.go:117] "RemoveContainer" containerID="4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.263903 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": container with ID starting with 4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c not found: ID does not exist" containerID="4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.263934 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c"} err="failed to get container status \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": rpc error: code = NotFound desc = could not find container \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": container with ID starting with 4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.263953 4689 scope.go:117] "RemoveContainer" containerID="cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.264436 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599"} err="failed to get container status \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": rpc error: code = NotFound desc = could not find container \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": container with ID starting with cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.264459 4689 scope.go:117] "RemoveContainer" containerID="4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.264885 4689 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3"} err="failed to get container status \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": rpc error: code = NotFound desc = could not find container \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": container with ID starting with 4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.264914 4689 scope.go:117] "RemoveContainer" containerID="6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.265283 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578"} err="failed to get container status \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": rpc error: code = NotFound desc = could not find container \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": container with ID starting with 6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.265311 4689 scope.go:117] "RemoveContainer" containerID="4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.266099 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c"} err="failed to get container status \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": rpc error: code = NotFound desc = could not find container \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": container with ID starting with 4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.266124 4689 scope.go:117] "RemoveContainer" containerID="cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.273729 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599"} err="failed to get container status \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": rpc error: code = NotFound desc = could not find container \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": container with ID starting with cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.273780 4689 scope.go:117] "RemoveContainer" containerID="4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.281215 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3"} err="failed to get container status \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": rpc error: code = NotFound desc = could not find container \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": container with ID starting with 4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3 not found: ID does not exist" Oct 
13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.281246 4689 scope.go:117] "RemoveContainer" containerID="6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.282533 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578"} err="failed to get container status \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": rpc error: code = NotFound desc = could not find container \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": container with ID starting with 6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.282556 4689 scope.go:117] "RemoveContainer" containerID="4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.283065 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c"} err="failed to get container status \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": rpc error: code = NotFound desc = could not find container \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": container with ID starting with 4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.283090 4689 scope.go:117] "RemoveContainer" containerID="cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.283339 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599"} err="failed to get container status \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": rpc error: code = NotFound desc = could not find container \"cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599\": container with ID starting with cb81497faece96720aeb4f7cf74c79feb0c2b36c0b907af254d3b7a10ad12599 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.283357 4689 scope.go:117] "RemoveContainer" containerID="4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.283779 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3"} err="failed to get container status \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": rpc error: code = NotFound desc = could not find container \"4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3\": container with ID starting with 4e775230a22c0a67aebf7239856baa5180bcd8000d675fde5ad27a99834a17f3 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.283844 4689 scope.go:117] "RemoveContainer" containerID="6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.284253 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578"} err="failed to get container status 
\"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": rpc error: code = NotFound desc = could not find container \"6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578\": container with ID starting with 6438eb3a493025bbfb11a6e0727decdd69cf9abfd676b5d7597655c5b9dc6578 not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.284274 4689 scope.go:117] "RemoveContainer" containerID="4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.286476 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c"} err="failed to get container status \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": rpc error: code = NotFound desc = could not find container \"4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c\": container with ID starting with 4cc10c154eda46e39f684acb899886bf4dac2be3bc0a0734603dc996dc1ab47c not found: ID does not exist" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.288843 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.298195 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.299848 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon-log" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.299929 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon-log" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300031 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="ceilometer-notification-agent" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300082 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="ceilometer-notification-agent" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300108 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300115 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300168 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="sg-core" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300240 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="sg-core" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300259 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="ceilometer-central-agent" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300266 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="ceilometer-central-agent" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300286 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc382ef2-90d0-4dd4-89cb-23d5e4dd9327" 
containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300347 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc382ef2-90d0-4dd4-89cb-23d5e4dd9327" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300379 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132dcc8a-a3c1-4c02-9cd8-be28bf0e006b" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300386 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="132dcc8a-a3c1-4c02-9cd8-be28bf0e006b" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300434 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="proxy-httpd" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300442 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="proxy-httpd" Oct 13 21:28:20 crc kubenswrapper[4689]: E1013 21:28:20.300510 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e882c36f-61b5-436d-ba2e-94f12bbb5010" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300520 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e882c36f-61b5-436d-ba2e-94f12bbb5010" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300799 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="132dcc8a-a3c1-4c02-9cd8-be28bf0e006b" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300857 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="ceilometer-central-agent" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300872 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e882c36f-61b5-436d-ba2e-94f12bbb5010" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300882 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="sg-core" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300928 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc382ef2-90d0-4dd4-89cb-23d5e4dd9327" containerName="mariadb-database-create" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300937 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon-log" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300946 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="proxy-httpd" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300955 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" containerName="ceilometer-notification-agent" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.300967 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd448066-1b70-4e35-959c-5c702d87560f" containerName="horizon" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.309349 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.313986 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.314169 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.316204 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.342365 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.345821 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.345869 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-log-httpd\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.345896 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-config-data\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.345916 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-run-httpd\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.345945 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.345991 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85fff\" (UniqueName: \"kubernetes.io/projected/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-kube-api-access-85fff\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.346037 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-scripts\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450150 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-config-data\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450620 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dp8d\" (UniqueName: \"kubernetes.io/projected/c0bc4829-b428-4d71-aeea-719aa026dcc0-kube-api-access-6dp8d\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450666 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-httpd-run\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450704 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-public-tls-certs\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450726 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-scripts\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450744 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450776 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-logs\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450793 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-combined-ca-bundle\") pod \"c0bc4829-b428-4d71-aeea-719aa026dcc0\" (UID: \"c0bc4829-b428-4d71-aeea-719aa026dcc0\") " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450887 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450939 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85fff\" (UniqueName: \"kubernetes.io/projected/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-kube-api-access-85fff\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.450985 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-scripts\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.451026 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.451051 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-log-httpd\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.451073 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-config-data\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.451093 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-run-httpd\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.451534 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-run-httpd\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.451996 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.452368 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-logs" (OuterVolumeSpecName: "logs") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.453411 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-log-httpd\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.463388 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.465323 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.469082 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.470714 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-scripts" (OuterVolumeSpecName: "scripts") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.472494 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0bc4829-b428-4d71-aeea-719aa026dcc0-kube-api-access-6dp8d" (OuterVolumeSpecName: "kube-api-access-6dp8d") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "kube-api-access-6dp8d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.473089 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-scripts\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.474761 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-config-data\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.490855 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85fff\" (UniqueName: \"kubernetes.io/projected/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-kube-api-access-85fff\") pod \"ceilometer-0\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") " pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.508513 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.552455 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.552536 4689 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.552550 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.552563 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.552576 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dp8d\" (UniqueName: \"kubernetes.io/projected/c0bc4829-b428-4d71-aeea-719aa026dcc0-kube-api-access-6dp8d\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.552599 4689 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0bc4829-b428-4d71-aeea-719aa026dcc0-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.570328 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-config-data" (OuterVolumeSpecName: "config-data") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.575565 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c0bc4829-b428-4d71-aeea-719aa026dcc0" (UID: "c0bc4829-b428-4d71-aeea-719aa026dcc0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.579929 4689 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.637741 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.656409 4689 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.656464 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.656481 4689 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0bc4829-b428-4d71-aeea-719aa026dcc0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.885707 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c0bc4829-b428-4d71-aeea-719aa026dcc0","Type":"ContainerDied","Data":"f2c5d0e526947325f049d9d9a74d5967cf6120ae68cc7db0edbf0efc4ed2241b"} Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.886125 4689 scope.go:117] "RemoveContainer" containerID="1298847ffef436f01cca433afe39c7766f3cd21da136c5e98d34689a544c66d9" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.886340 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:28:20 crc kubenswrapper[4689]: I1013 21:28:20.927325 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.063035 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.081801 4689 scope.go:117] "RemoveContainer" containerID="4a838a606986f7a7b6fbde47b397b75c844d3997c4c66c8fb463b4c7075d80e1" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.123845 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.141049 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:28:21 crc kubenswrapper[4689]: E1013 21:28:21.143498 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-log" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.143542 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-log" Oct 13 21:28:21 crc kubenswrapper[4689]: E1013 21:28:21.143563 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-httpd" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.143573 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-httpd" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.143794 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-httpd" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.143828 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" containerName="glance-log" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.145839 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.155280 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.155498 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.174074 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.226416 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.278995 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-config-data\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.279053 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.279095 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.279117 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qhtn\" (UniqueName: \"kubernetes.io/projected/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-kube-api-access-2qhtn\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.279171 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.279187 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.279226 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-logs\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " 
pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.279253 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-scripts\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384287 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384328 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384380 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-logs\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384414 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-scripts\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384498 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-config-data\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384533 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384564 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.384604 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qhtn\" (UniqueName: \"kubernetes.io/projected/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-kube-api-access-2qhtn\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 
21:28:21.385461 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.386722 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-logs\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.388831 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.393442 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-scripts\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.394451 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.395484 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.403616 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qhtn\" (UniqueName: \"kubernetes.io/projected/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-kube-api-access-2qhtn\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.418910 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6e5dd8-c0bc-49ec-b03a-9971dbd85486-config-data\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.451628 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486\") " pod="openstack/glance-default-external-api-0"
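[The MountVolume.MountDevice entry above shows the kubelet resolving the kubernetes.io/local-volume plugin's "local-storage03-crc" to the host path /mnt/openstack/pv03 on node crc. A minimal Go sketch of the local PersistentVolume shape that would yield exactly this mapping, written against the k8s.io/api types the kubelet is built from; the PV name, plugin, path, and node come from the log, while capacity, access mode, and storage class are assumptions:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// localPV reconstructs the PersistentVolume implied by the log:
// name local-storage03-crc, backed by /mnt/openstack/pv03, pinned
// to node "crc" via required node affinity (local PVs must be).
func localPV() *corev1.PersistentVolume {
	return &corev1.PersistentVolume{
		ObjectMeta: metav1.ObjectMeta{Name: "local-storage03-crc"},
		Spec: corev1.PersistentVolumeSpec{
			Capacity: corev1.ResourceList{
				corev1.ResourceStorage: resource.MustParse("10Gi"), // assumed size
			},
			AccessModes:      []corev1.PersistentVolumeAccessMode{corev1.ReadWriteOnce},
			StorageClassName: "local-storage", // assumed class name
			PersistentVolumeSource: corev1.PersistentVolumeSource{
				Local: &corev1.LocalVolumeSource{Path: "/mnt/openstack/pv03"},
			},
			NodeAffinity: &corev1.VolumeNodeAffinity{
				Required: &corev1.NodeSelector{
					NodeSelectorTerms: []corev1.NodeSelectorTerm{{
						MatchExpressions: []corev1.NodeSelectorRequirement{{
							Key:      "kubernetes.io/hostname",
							Operator: corev1.NodeSelectorOpIn,
							Values:   []string{"crc"},
						}},
					}},
				},
			},
		},
	}
}

func main() { fmt.Println(localPV().Name) }

A pod bound to this PV through a matching claim is what drives the VerifyControllerAttachedVolume, then MountDevice, then SetUp sequence recorded here.]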
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.503851 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.506490 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.590379 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f52lf\" (UniqueName: \"kubernetes.io/projected/519df139-a232-4218-a9ba-4d626fe3d115-kube-api-access-f52lf\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.591200 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.591481 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-combined-ca-bundle\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.591530 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-config-data\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.591598 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-internal-tls-certs\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.591659 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-scripts\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.591792 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-httpd-run\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.591831 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-logs\") pod \"519df139-a232-4218-a9ba-4d626fe3d115\" (UID: \"519df139-a232-4218-a9ba-4d626fe3d115\") "
Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.593844 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-logs" (OuterVolumeSpecName: "logs") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "logs".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.594247 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.600994 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-scripts" (OuterVolumeSpecName: "scripts") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.606816 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.607182 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/519df139-a232-4218-a9ba-4d626fe3d115-kube-api-access-f52lf" (OuterVolumeSpecName: "kube-api-access-f52lf") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "kube-api-access-f52lf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.655015 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.660818 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.669931 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-config-data" (OuterVolumeSpecName: "config-data") pod "519df139-a232-4218-a9ba-4d626fe3d115" (UID: "519df139-a232-4218-a9ba-4d626fe3d115"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693767 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f52lf\" (UniqueName: \"kubernetes.io/projected/519df139-a232-4218-a9ba-4d626fe3d115-kube-api-access-f52lf\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693820 4689 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693846 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693857 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693866 4689 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693873 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519df139-a232-4218-a9ba-4d626fe3d115-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693882 4689 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.693890 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/519df139-a232-4218-a9ba-4d626fe3d115-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.732223 4689 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.795982 4689 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.882240 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1335c6bc-6a9e-42c9-a7a4-c01b1a340803" path="/var/lib/kubelet/pods/1335c6bc-6a9e-42c9-a7a4-c01b1a340803/volumes" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.886115 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0bc4829-b428-4d71-aeea-719aa026dcc0" path="/var/lib/kubelet/pods/c0bc4829-b428-4d71-aeea-719aa026dcc0/volumes" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.935014 4689 generic.go:334] "Generic (PLEG): container finished" podID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerID="37d73f863fb58767978048393ee9d0ff0b55db0ac178ce34413ea96fce4a44b8" exitCode=0 Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.935104 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86dc7995bd-76xtf" 
event={"ID":"140a26a7-0308-4d50-b2ec-d2e55be6b812","Type":"ContainerDied","Data":"37d73f863fb58767978048393ee9d0ff0b55db0ac178ce34413ea96fce4a44b8"} Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.944014 4689 generic.go:334] "Generic (PLEG): container finished" podID="519df139-a232-4218-a9ba-4d626fe3d115" containerID="1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5" exitCode=0 Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.944101 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"519df139-a232-4218-a9ba-4d626fe3d115","Type":"ContainerDied","Data":"1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5"} Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.944135 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"519df139-a232-4218-a9ba-4d626fe3d115","Type":"ContainerDied","Data":"308eda75dd02282ed98a1a55922058e16de09748eb978c9f067acb3b2cd19443"} Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.944156 4689 scope.go:117] "RemoveContainer" containerID="1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.944283 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.954925 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerStarted","Data":"9e8e02cd3edcf03bf543ce2742e86c58c05e85400dc7a7824570a7aaf29bd9ad"} Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.994642 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:28:21 crc kubenswrapper[4689]: I1013 21:28:21.999911 4689 scope.go:117] "RemoveContainer" containerID="0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.023645 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.035473 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:28:22 crc kubenswrapper[4689]: E1013 21:28:22.035968 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-httpd" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.035987 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-httpd" Oct 13 21:28:22 crc kubenswrapper[4689]: E1013 21:28:22.036006 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-log" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.036013 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-log" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.036207 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-log" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.036237 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="519df139-a232-4218-a9ba-4d626fe3d115" containerName="glance-httpd" Oct 13 
21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.037315 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.042741 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.043353 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.043867 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.057523 4689 scope.go:117] "RemoveContainer" containerID="1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5" Oct 13 21:28:22 crc kubenswrapper[4689]: E1013 21:28:22.058830 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5\": container with ID starting with 1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5 not found: ID does not exist" containerID="1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.058981 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5"} err="failed to get container status \"1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5\": rpc error: code = NotFound desc = could not find container \"1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5\": container with ID starting with 1e326a825f48a16f44e192a8a7ba6b2954101832e8a8a1b8d0b084f1767a19a5 not found: ID does not exist" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.059089 4689 scope.go:117] "RemoveContainer" containerID="0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35" Oct 13 21:28:22 crc kubenswrapper[4689]: E1013 21:28:22.059430 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35\": container with ID starting with 0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35 not found: ID does not exist" containerID="0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.059649 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35"} err="failed to get container status \"0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35\": rpc error: code = NotFound desc = could not find container \"0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35\": container with ID starting with 0882d60c720ad049ea0aca790fec7010716656be4bc9da02964eb76b95d9ac35 not found: ID does not exist" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.103048 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " 
pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.103313 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.103461 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcv2h\" (UniqueName: \"kubernetes.io/projected/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-kube-api-access-tcv2h\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.103710 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.103987 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-logs\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.104039 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.104151 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.104219 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.116537 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219092 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-logs\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219182 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219319 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219401 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219460 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219578 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219701 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcv2h\" (UniqueName: \"kubernetes.io/projected/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-kube-api-access-tcv2h\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.219796 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.220334 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.224198 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.226710 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-logs\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.228654 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.241386 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.246228 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.249292 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcv2h\" (UniqueName: \"kubernetes.io/projected/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-kube-api-access-tcv2h\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.251773 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.296990 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47\") " pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.366698 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.404718 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-86dc7995bd-76xtf" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.528113 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57t9l\" (UniqueName: \"kubernetes.io/projected/140a26a7-0308-4d50-b2ec-d2e55be6b812-kube-api-access-57t9l\") pod \"140a26a7-0308-4d50-b2ec-d2e55be6b812\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.528505 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-httpd-config\") pod \"140a26a7-0308-4d50-b2ec-d2e55be6b812\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.532663 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.533097 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-config\") pod \"140a26a7-0308-4d50-b2ec-d2e55be6b812\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.533128 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-ovndb-tls-certs\") pod \"140a26a7-0308-4d50-b2ec-d2e55be6b812\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.533191 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-combined-ca-bundle\") pod \"140a26a7-0308-4d50-b2ec-d2e55be6b812\" (UID: \"140a26a7-0308-4d50-b2ec-d2e55be6b812\") " Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.539855 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/140a26a7-0308-4d50-b2ec-d2e55be6b812-kube-api-access-57t9l" (OuterVolumeSpecName: "kube-api-access-57t9l") pod "140a26a7-0308-4d50-b2ec-d2e55be6b812" (UID: "140a26a7-0308-4d50-b2ec-d2e55be6b812"). InnerVolumeSpecName "kube-api-access-57t9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.540870 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "140a26a7-0308-4d50-b2ec-d2e55be6b812" (UID: "140a26a7-0308-4d50-b2ec-d2e55be6b812"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.628344 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-config" (OuterVolumeSpecName: "config") pod "140a26a7-0308-4d50-b2ec-d2e55be6b812" (UID: "140a26a7-0308-4d50-b2ec-d2e55be6b812"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.636659 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57t9l\" (UniqueName: \"kubernetes.io/projected/140a26a7-0308-4d50-b2ec-d2e55be6b812-kube-api-access-57t9l\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.636689 4689 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.636700 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.641798 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "140a26a7-0308-4d50-b2ec-d2e55be6b812" (UID: "140a26a7-0308-4d50-b2ec-d2e55be6b812"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.665338 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "140a26a7-0308-4d50-b2ec-d2e55be6b812" (UID: "140a26a7-0308-4d50-b2ec-d2e55be6b812"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.738809 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.739100 4689 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/140a26a7-0308-4d50-b2ec-d2e55be6b812-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.978928 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerStarted","Data":"2b58d705453839aae353db02f1acf9056a0811e300164ff852c5e84a78247595"} Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.978967 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerStarted","Data":"df3906572b27891aebbbfd5cfd1d75d06879bf058133db34e940086c531ac6a6"} Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.983273 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-86dc7995bd-76xtf" event={"ID":"140a26a7-0308-4d50-b2ec-d2e55be6b812","Type":"ContainerDied","Data":"2ed6598d54ce854ba84448b5dc59a5d887382e2342ad4f310a6b4b9ee537b7fb"} Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.983318 4689 scope.go:117] "RemoveContainer" containerID="3d9c65c1bce1f25279f529b62b6ec8e12128ae3528a65493d767422116dd67e9" Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.983448 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-86dc7995bd-76xtf"
Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.994707 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486","Type":"ContainerStarted","Data":"e50c9304be9580ee74f0f6199bb178fbd540c9f4783e0c467f59e218200a3ed8"}
Oct 13 21:28:22 crc kubenswrapper[4689]: I1013 21:28:22.994751 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486","Type":"ContainerStarted","Data":"672c1937da13d839d1b271de60f1ca0608098c46c0b782ce0b8084eda00c6d29"}
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.064252 4689 scope.go:117] "RemoveContainer" containerID="37d73f863fb58767978048393ee9d0ff0b55db0ac178ce34413ea96fce4a44b8"
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.064969 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-86dc7995bd-76xtf"]
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.093658 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.124658 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-86dc7995bd-76xtf"]
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.867051 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.867467 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.887824 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" path="/var/lib/kubelet/pods/140a26a7-0308-4d50-b2ec-d2e55be6b812/volumes"
Oct 13 21:28:23 crc kubenswrapper[4689]: I1013 21:28:23.888529 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="519df139-a232-4218-a9ba-4d626fe3d115" path="/var/lib/kubelet/pods/519df139-a232-4218-a9ba-4d626fe3d115/volumes"
Oct 13 21:28:24 crc kubenswrapper[4689]: I1013 21:28:24.028783 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2a6e5dd8-c0bc-49ec-b03a-9971dbd85486","Type":"ContainerStarted","Data":"27df07de682746819b7b3a2d563c83039ffea55dcd67596eb7ab5e31d62c7c1a"}
Oct 13 21:28:24 crc kubenswrapper[4689]: I1013 21:28:24.038820 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47","Type":"ContainerStarted","Data":"34da5241eb270e65f5a4920768f42ffaa8f50b4d552c3c0b50abbec08211ae72"}
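[The two prober lines above record a failing HTTP liveness check: the kubelet issued Get "http://127.0.0.1:8798/health" against the host-local port of machine-config-daemon-w5fqm and was refused. A minimal sketch of the corev1.Probe that would produce such a check, using current k8s.io/api types; host, path, and port come from the log output, while the delay, period, and threshold values are assumptions:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

// livenessProbe sketches the HTTP liveness check behind the
// "Probe failed" entries: GET http://127.0.0.1:8798/health.
func livenessProbe() *corev1.Probe {
	return &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1",
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
		InitialDelaySeconds: 10, // assumed
		PeriodSeconds:       10, // assumed
		FailureThreshold:    3,  // assumed; repeated misses trigger a restart
	}
}

func main() { fmt.Printf("%+v\n", livenessProbe().HTTPGet) }

A single refused connection like the one logged only becomes a restart once the failure threshold is exhausted, which is why no corresponding kill appears in this window.]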
event={"ID":"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47","Type":"ContainerStarted","Data":"5f69e7bf1585172c0969a42be118f1e95f61a02e4ce4f70a8fcf6f960d035da2"} Oct 13 21:28:24 crc kubenswrapper[4689]: I1013 21:28:24.040327 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerStarted","Data":"a6c993c2a1c989946f74494b962e6303a1d4eac715eb90ce3d24bb8c59596c01"} Oct 13 21:28:24 crc kubenswrapper[4689]: I1013 21:28:24.068038 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.068022224 podStartE2EDuration="3.068022224s" podCreationTimestamp="2025-10-13 21:28:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:28:24.058219103 +0000 UTC m=+1020.976464188" watchObservedRunningTime="2025-10-13 21:28:24.068022224 +0000 UTC m=+1020.986267309" Oct 13 21:28:25 crc kubenswrapper[4689]: I1013 21:28:25.057665 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47","Type":"ContainerStarted","Data":"2386a6a2ddb083e4412f926516651b4e6c2bcd48db32cb3d297667bbc23b3a77"} Oct 13 21:28:25 crc kubenswrapper[4689]: I1013 21:28:25.083371 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.083348483 podStartE2EDuration="4.083348483s" podCreationTimestamp="2025-10-13 21:28:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:28:25.083009485 +0000 UTC m=+1022.001254570" watchObservedRunningTime="2025-10-13 21:28:25.083348483 +0000 UTC m=+1022.001593568" Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.093213 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerStarted","Data":"229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a"} Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.093627 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.093633 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="proxy-httpd" containerID="cri-o://229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a" gracePeriod=30 Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.093668 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-notification-agent" containerID="cri-o://2b58d705453839aae353db02f1acf9056a0811e300164ff852c5e84a78247595" gracePeriod=30 Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.093358 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-central-agent" containerID="cri-o://df3906572b27891aebbbfd5cfd1d75d06879bf058133db34e940086c531ac6a6" gracePeriod=30 Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.093766 4689 kuberuntime_container.go:808] "Killing container with a 
Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.093766 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="sg-core" containerID="cri-o://a6c993c2a1c989946f74494b962e6303a1d4eac715eb90ce3d24bb8c59596c01" gracePeriod=30
Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.135207 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.5117689800000003 podStartE2EDuration="6.135186404s" podCreationTimestamp="2025-10-13 21:28:20 +0000 UTC" firstStartedPulling="2025-10-13 21:28:21.255728996 +0000 UTC m=+1018.173974081" lastFinishedPulling="2025-10-13 21:28:24.87914642 +0000 UTC m=+1021.797391505" observedRunningTime="2025-10-13 21:28:26.133021693 +0000 UTC m=+1023.051266788" watchObservedRunningTime="2025-10-13 21:28:26.135186404 +0000 UTC m=+1023.053431489"
Oct 13 21:28:26 crc kubenswrapper[4689]: I1013 21:28:26.166578 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Oct 13 21:28:27 crc kubenswrapper[4689]: I1013 21:28:27.109100 4689 generic.go:334] "Generic (PLEG): container finished" podID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerID="229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a" exitCode=0
Oct 13 21:28:27 crc kubenswrapper[4689]: I1013 21:28:27.109137 4689 generic.go:334] "Generic (PLEG): container finished" podID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerID="a6c993c2a1c989946f74494b962e6303a1d4eac715eb90ce3d24bb8c59596c01" exitCode=2
Oct 13 21:28:27 crc kubenswrapper[4689]: I1013 21:28:27.109145 4689 generic.go:334] "Generic (PLEG): container finished" podID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerID="2b58d705453839aae353db02f1acf9056a0811e300164ff852c5e84a78247595" exitCode=0
Oct 13 21:28:27 crc kubenswrapper[4689]: I1013 21:28:27.109166 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerDied","Data":"229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a"}
Oct 13 21:28:27 crc kubenswrapper[4689]: I1013 21:28:27.109191 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerDied","Data":"a6c993c2a1c989946f74494b962e6303a1d4eac715eb90ce3d24bb8c59596c01"}
Oct 13 21:28:27 crc kubenswrapper[4689]: I1013 21:28:27.109200 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerDied","Data":"2b58d705453839aae353db02f1acf9056a0811e300164ff852c5e84a78247595"}
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.135747 4689 generic.go:334] "Generic (PLEG): container finished" podID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerID="df3906572b27891aebbbfd5cfd1d75d06879bf058133db34e940086c531ac6a6" exitCode=0
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.135890 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerDied","Data":"df3906572b27891aebbbfd5cfd1d75d06879bf058133db34e940086c531ac6a6"}
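[The four "Killing container with a grace period" entries and the exit codes that follow show the standard termination handshake: the runtime delivers SIGTERM, then escalates to SIGKILL after gracePeriod=30 seconds. Three ceilometer containers exited 0 inside the window; sg-core surfaced exitCode=2, consistent with a process whose shutdown path returns a non-zero status. A minimal Go sketch, a hypothetical program rather than ceilometer's actual code, of a process that cooperates with this handshake:

package main

import (
	"context"
	"log"
	"os"
	"os/signal"
	"syscall"
	"time"
)

func main() {
	// Wake up when the runtime delivers SIGTERM (or Ctrl-C locally).
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, os.Interrupt)
	defer stop()

	<-ctx.Done() // SIGTERM received; the 30s clock is now running
	log.Println("draining in-flight work before the grace deadline")
	time.Sleep(2 * time.Second) // placeholder for real cleanup
	os.Exit(0)                  // clean exit -> exitCode=0 in the PLEG event
}

A process that ignores SIGTERM instead runs the full 30 seconds and is SIGKILLed, which would show up as a non-zero exit code and a longer gap between the kill and ContainerDied entries than the roughly one to three seconds seen here.]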
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.335804 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.512967 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85fff\" (UniqueName: \"kubernetes.io/projected/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-kube-api-access-85fff\") pod \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") "
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.513438 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-log-httpd\") pod \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") "
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.513461 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-run-httpd\") pod \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") "
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.513534 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-sg-core-conf-yaml\") pod \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") "
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.513558 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-scripts\") pod \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") "
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.513606 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-combined-ca-bundle\") pod \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") "
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.513663 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-config-data\") pod \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\" (UID: \"c4d7daf0-6d4a-4de8-b88c-98585c49e40d\") "
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.514983 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c4d7daf0-6d4a-4de8-b88c-98585c49e40d" (UID: "c4d7daf0-6d4a-4de8-b88c-98585c49e40d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.515158 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c4d7daf0-6d4a-4de8-b88c-98585c49e40d" (UID: "c4d7daf0-6d4a-4de8-b88c-98585c49e40d"). InnerVolumeSpecName "run-httpd".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.520245 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-scripts" (OuterVolumeSpecName: "scripts") pod "c4d7daf0-6d4a-4de8-b88c-98585c49e40d" (UID: "c4d7daf0-6d4a-4de8-b88c-98585c49e40d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.538793 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-kube-api-access-85fff" (OuterVolumeSpecName: "kube-api-access-85fff") pod "c4d7daf0-6d4a-4de8-b88c-98585c49e40d" (UID: "c4d7daf0-6d4a-4de8-b88c-98585c49e40d"). InnerVolumeSpecName "kube-api-access-85fff". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.541711 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c4d7daf0-6d4a-4de8-b88c-98585c49e40d" (UID: "c4d7daf0-6d4a-4de8-b88c-98585c49e40d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.589036 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4d7daf0-6d4a-4de8-b88c-98585c49e40d" (UID: "c4d7daf0-6d4a-4de8-b88c-98585c49e40d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.615291 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.615323 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.615335 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85fff\" (UniqueName: \"kubernetes.io/projected/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-kube-api-access-85fff\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.615346 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.615357 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.615364 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.616062 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-config-data" (OuterVolumeSpecName: "config-data") pod "c4d7daf0-6d4a-4de8-b88c-98585c49e40d" (UID: "c4d7daf0-6d4a-4de8-b88c-98585c49e40d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:29 crc kubenswrapper[4689]: I1013 21:28:29.717553 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4d7daf0-6d4a-4de8-b88c-98585c49e40d-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.148917 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c4d7daf0-6d4a-4de8-b88c-98585c49e40d","Type":"ContainerDied","Data":"9e8e02cd3edcf03bf543ce2742e86c58c05e85400dc7a7824570a7aaf29bd9ad"} Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.148976 4689 scope.go:117] "RemoveContainer" containerID="229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.148998 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.174154 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.189900 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.201492 4689 scope.go:117] "RemoveContainer" containerID="a6c993c2a1c989946f74494b962e6303a1d4eac715eb90ce3d24bb8c59596c01" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.205795 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:30 crc kubenswrapper[4689]: E1013 21:28:30.206673 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-httpd" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.206709 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-httpd" Oct 13 21:28:30 crc kubenswrapper[4689]: E1013 21:28:30.206727 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-central-agent" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.206740 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-central-agent" Oct 13 21:28:30 crc kubenswrapper[4689]: E1013 21:28:30.206764 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-api" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.206776 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-api" Oct 13 21:28:30 crc kubenswrapper[4689]: E1013 21:28:30.206801 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-notification-agent" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.206812 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-notification-agent" Oct 13 21:28:30 crc kubenswrapper[4689]: E1013 21:28:30.206835 4689 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="proxy-httpd" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.206848 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="proxy-httpd" Oct 13 21:28:30 crc kubenswrapper[4689]: E1013 21:28:30.206899 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="sg-core" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.206910 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="sg-core" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.207252 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="proxy-httpd" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.207295 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-httpd" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.207319 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-notification-agent" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.207345 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="140a26a7-0308-4d50-b2ec-d2e55be6b812" containerName="neutron-api" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.207365 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="sg-core" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.207384 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" containerName="ceilometer-central-agent" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.210376 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.214434 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.215330 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.228905 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-config-data\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.229026 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-log-httpd\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.229249 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-scripts\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.229329 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.229433 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-run-httpd\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.229616 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5zxc\" (UniqueName: \"kubernetes.io/projected/692b35cd-fab7-432a-a19c-046f4684dcef-kube-api-access-d5zxc\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.229676 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.235995 4689 scope.go:117] "RemoveContainer" containerID="2b58d705453839aae353db02f1acf9056a0811e300164ff852c5e84a78247595" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.243660 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.255014 4689 scope.go:117] "RemoveContainer" containerID="df3906572b27891aebbbfd5cfd1d75d06879bf058133db34e940086c531ac6a6" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 
21:28:30.331158 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5zxc\" (UniqueName: \"kubernetes.io/projected/692b35cd-fab7-432a-a19c-046f4684dcef-kube-api-access-d5zxc\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.331220 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.331353 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-config-data\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.331400 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-log-httpd\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.331442 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-scripts\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.331466 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.331502 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-run-httpd\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.332420 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-run-httpd\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.332448 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-log-httpd\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.336785 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.337156 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-config-data\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.341176 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-scripts\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.341323 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.347571 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5zxc\" (UniqueName: \"kubernetes.io/projected/692b35cd-fab7-432a-a19c-046f4684dcef-kube-api-access-d5zxc\") pod \"ceilometer-0\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " pod="openstack/ceilometer-0" Oct 13 21:28:30 crc kubenswrapper[4689]: I1013 21:28:30.532801 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:31 crc kubenswrapper[4689]: I1013 21:28:31.008217 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:31 crc kubenswrapper[4689]: I1013 21:28:31.165719 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerStarted","Data":"348c779d1ed48b3dbb3878ec2fa4cc9524e827ca79762d8dcc66bd3562c11f1f"} Oct 13 21:28:31 crc kubenswrapper[4689]: I1013 21:28:31.507279 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 13 21:28:31 crc kubenswrapper[4689]: I1013 21:28:31.507344 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 13 21:28:31 crc kubenswrapper[4689]: I1013 21:28:31.550510 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 13 21:28:31 crc kubenswrapper[4689]: I1013 21:28:31.586640 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 13 21:28:31 crc kubenswrapper[4689]: I1013 21:28:31.885085 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4d7daf0-6d4a-4de8-b88c-98585c49e40d" path="/var/lib/kubelet/pods/c4d7daf0-6d4a-4de8-b88c-98585c49e40d/volumes" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.182112 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerStarted","Data":"df8ea4d5bf796ae907e5a513e850fd25db9e6feba50248f80ca51f7ed6d8bc6e"} Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.182184 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.182202 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.278014 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f42-account-create-jvccr"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.280177 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.287023 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f42-account-create-jvccr"]
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.367151 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.367429 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.411051 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.422134 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.479825 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ddl4\" (UniqueName: \"kubernetes.io/projected/0d841e40-0d97-4ec7-a0fa-ad896fda51bf-kube-api-access-8ddl4\") pod \"nova-api-5f42-account-create-jvccr\" (UID: \"0d841e40-0d97-4ec7-a0fa-ad896fda51bf\") " pod="openstack/nova-api-5f42-account-create-jvccr"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.525195 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-a47a-account-create-s6fdj"]
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.526643 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-a47a-account-create-s6fdj"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.533674 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-a47a-account-create-s6fdj"]
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.533904 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.581913 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ddl4\" (UniqueName: \"kubernetes.io/projected/0d841e40-0d97-4ec7-a0fa-ad896fda51bf-kube-api-access-8ddl4\") pod \"nova-api-5f42-account-create-jvccr\" (UID: \"0d841e40-0d97-4ec7-a0fa-ad896fda51bf\") " pod="openstack/nova-api-5f42-account-create-jvccr"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.598342 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ddl4\" (UniqueName: \"kubernetes.io/projected/0d841e40-0d97-4ec7-a0fa-ad896fda51bf-kube-api-access-8ddl4\") pod \"nova-api-5f42-account-create-jvccr\" (UID: \"0d841e40-0d97-4ec7-a0fa-ad896fda51bf\") " pod="openstack/nova-api-5f42-account-create-jvccr"
Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.604496 4689 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-api-5f42-account-create-jvccr" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.676864 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-5173-account-create-5rmz2"] Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.678043 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5173-account-create-5rmz2" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.681070 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.684075 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ksk2\" (UniqueName: \"kubernetes.io/projected/7680d8a1-9a0d-443b-8ac4-08a8e60cccc5-kube-api-access-9ksk2\") pod \"nova-cell0-a47a-account-create-s6fdj\" (UID: \"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5\") " pod="openstack/nova-cell0-a47a-account-create-s6fdj" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.689361 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5173-account-create-5rmz2"] Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.785962 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ksk2\" (UniqueName: \"kubernetes.io/projected/7680d8a1-9a0d-443b-8ac4-08a8e60cccc5-kube-api-access-9ksk2\") pod \"nova-cell0-a47a-account-create-s6fdj\" (UID: \"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5\") " pod="openstack/nova-cell0-a47a-account-create-s6fdj" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.786295 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pghzq\" (UniqueName: \"kubernetes.io/projected/68719362-13c4-4fca-8437-e68b33609d60-kube-api-access-pghzq\") pod \"nova-cell1-5173-account-create-5rmz2\" (UID: \"68719362-13c4-4fca-8437-e68b33609d60\") " pod="openstack/nova-cell1-5173-account-create-5rmz2" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.811863 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ksk2\" (UniqueName: \"kubernetes.io/projected/7680d8a1-9a0d-443b-8ac4-08a8e60cccc5-kube-api-access-9ksk2\") pod \"nova-cell0-a47a-account-create-s6fdj\" (UID: \"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5\") " pod="openstack/nova-cell0-a47a-account-create-s6fdj" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.865880 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-a47a-account-create-s6fdj" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.887567 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pghzq\" (UniqueName: \"kubernetes.io/projected/68719362-13c4-4fca-8437-e68b33609d60-kube-api-access-pghzq\") pod \"nova-cell1-5173-account-create-5rmz2\" (UID: \"68719362-13c4-4fca-8437-e68b33609d60\") " pod="openstack/nova-cell1-5173-account-create-5rmz2" Oct 13 21:28:32 crc kubenswrapper[4689]: I1013 21:28:32.904576 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pghzq\" (UniqueName: \"kubernetes.io/projected/68719362-13c4-4fca-8437-e68b33609d60-kube-api-access-pghzq\") pod \"nova-cell1-5173-account-create-5rmz2\" (UID: \"68719362-13c4-4fca-8437-e68b33609d60\") " pod="openstack/nova-cell1-5173-account-create-5rmz2" Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.047165 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5173-account-create-5rmz2" Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.082993 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f42-account-create-jvccr"] Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.199894 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f42-account-create-jvccr" event={"ID":"0d841e40-0d97-4ec7-a0fa-ad896fda51bf","Type":"ContainerStarted","Data":"6cbc6404777335e2ee7629780d223388f96b83b1bbdaefe6d9209d2f4456b474"} Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.204348 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerStarted","Data":"e81bda92c5910dd26b67da394487dc68f240781379550397e6c7f72c0b4ebd1d"} Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.204568 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerStarted","Data":"d54b529e7a346a94ab81648f877aae3b31999dc11d96aae6150a83e58cc4bf0c"} Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.205462 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.205489 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.395857 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-a47a-account-create-s6fdj"] Oct 13 21:28:33 crc kubenswrapper[4689]: W1013 21:28:33.396920 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4d7daf0_6d4a_4de8_b88c_98585c49e40d.slice/crio-229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a.scope WatchSource:0}: Error finding container 229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a: Status 404 returned error can't find the container with id 229b149bb3632b048eed80f7906f9982ddd3cb1a4a3412acea01e7ab8d26461a Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.509120 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5173-account-create-5rmz2"] Oct 13 21:28:33 crc kubenswrapper[4689]: W1013 21:28:33.658063 4689 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68719362_13c4_4fca_8437_e68b33609d60.slice/crio-cc28363c9eb78bf81cafa73f4f408a4f78f824bdc2d857a1a8418d4376ee1446 WatchSource:0}: Error finding container cc28363c9eb78bf81cafa73f4f408a4f78f824bdc2d857a1a8418d4376ee1446: Status 404 returned error can't find the container with id cc28363c9eb78bf81cafa73f4f408a4f78f824bdc2d857a1a8418d4376ee1446 Oct 13 21:28:33 crc kubenswrapper[4689]: I1013 21:28:33.856603 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 21:28:33 crc kubenswrapper[4689]: E1013 21:28:33.984136 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75cc0f0c_aa8d_4333_b83f_d0f740c8308c.slice/crio-conmon-e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75cc0f0c_aa8d_4333_b83f_d0f740c8308c.slice/crio-e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.016349 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data-custom\") pod \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.016401 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-etc-machine-id\") pod \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.016469 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-logs\") pod \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.016560 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xxrk\" (UniqueName: \"kubernetes.io/projected/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-kube-api-access-7xxrk\") pod \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.016669 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data\") pod \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.016705 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-combined-ca-bundle\") pod \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.016732 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-scripts\") pod \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\" (UID: \"75cc0f0c-aa8d-4333-b83f-d0f740c8308c\") " Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.018408 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-logs" (OuterVolumeSpecName: "logs") pod "75cc0f0c-aa8d-4333-b83f-d0f740c8308c" (UID: "75cc0f0c-aa8d-4333-b83f-d0f740c8308c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.021791 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "75cc0f0c-aa8d-4333-b83f-d0f740c8308c" (UID: "75cc0f0c-aa8d-4333-b83f-d0f740c8308c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.047328 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "75cc0f0c-aa8d-4333-b83f-d0f740c8308c" (UID: "75cc0f0c-aa8d-4333-b83f-d0f740c8308c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.047679 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-kube-api-access-7xxrk" (OuterVolumeSpecName: "kube-api-access-7xxrk") pod "75cc0f0c-aa8d-4333-b83f-d0f740c8308c" (UID: "75cc0f0c-aa8d-4333-b83f-d0f740c8308c"). InnerVolumeSpecName "kube-api-access-7xxrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.061790 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-scripts" (OuterVolumeSpecName: "scripts") pod "75cc0f0c-aa8d-4333-b83f-d0f740c8308c" (UID: "75cc0f0c-aa8d-4333-b83f-d0f740c8308c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.083751 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75cc0f0c-aa8d-4333-b83f-d0f740c8308c" (UID: "75cc0f0c-aa8d-4333-b83f-d0f740c8308c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.089846 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data" (OuterVolumeSpecName: "config-data") pod "75cc0f0c-aa8d-4333-b83f-d0f740c8308c" (UID: "75cc0f0c-aa8d-4333-b83f-d0f740c8308c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.118658 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.118693 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.118703 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.118711 4689 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.118721 4689 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.118729 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.118738 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xxrk\" (UniqueName: \"kubernetes.io/projected/75cc0f0c-aa8d-4333-b83f-d0f740c8308c-kube-api-access-7xxrk\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.223203 4689 generic.go:334] "Generic (PLEG): container finished" podID="68719362-13c4-4fca-8437-e68b33609d60" containerID="cc2a6a662cf75ab61080d54e18f6fc621714d2b36508c63f81b2e587c865f98b" exitCode=0 Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.223339 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5173-account-create-5rmz2" event={"ID":"68719362-13c4-4fca-8437-e68b33609d60","Type":"ContainerDied","Data":"cc2a6a662cf75ab61080d54e18f6fc621714d2b36508c63f81b2e587c865f98b"} Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.223375 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5173-account-create-5rmz2" event={"ID":"68719362-13c4-4fca-8437-e68b33609d60","Type":"ContainerStarted","Data":"cc28363c9eb78bf81cafa73f4f408a4f78f824bdc2d857a1a8418d4376ee1446"} Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.229637 4689 generic.go:334] "Generic (PLEG): container finished" podID="7680d8a1-9a0d-443b-8ac4-08a8e60cccc5" containerID="ebb7077e2e08d98c75e554f5b24804658672028f9d2b326aa563d3791e1ee2ce" exitCode=0 Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.229699 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-a47a-account-create-s6fdj" event={"ID":"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5","Type":"ContainerDied","Data":"ebb7077e2e08d98c75e554f5b24804658672028f9d2b326aa563d3791e1ee2ce"} Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.230006 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-a47a-account-create-s6fdj" 
event={"ID":"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5","Type":"ContainerStarted","Data":"1159631858d7f793fb68f1de6ff99719f9ef8ac658ef4a0bdb893fdf61ac7d6b"} Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.234601 4689 generic.go:334] "Generic (PLEG): container finished" podID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerID="e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67" exitCode=137 Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.234685 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75cc0f0c-aa8d-4333-b83f-d0f740c8308c","Type":"ContainerDied","Data":"e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67"} Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.234718 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"75cc0f0c-aa8d-4333-b83f-d0f740c8308c","Type":"ContainerDied","Data":"7e6e984e3f461432ea4a7ac70822e7e40bb30e77c1f62458d5d2c367e92b2561"} Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.234747 4689 scope.go:117] "RemoveContainer" containerID="e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.234989 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.247860 4689 generic.go:334] "Generic (PLEG): container finished" podID="0d841e40-0d97-4ec7-a0fa-ad896fda51bf" containerID="bd9a6c99b167e2d144aa438bfe28e8259cebd1fecc61b43dbfdd08b01cdc28fe" exitCode=0 Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.247945 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f42-account-create-jvccr" event={"ID":"0d841e40-0d97-4ec7-a0fa-ad896fda51bf","Type":"ContainerDied","Data":"bd9a6c99b167e2d144aa438bfe28e8259cebd1fecc61b43dbfdd08b01cdc28fe"} Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.248037 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.248048 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.306333 4689 scope.go:117] "RemoveContainer" containerID="6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.315444 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.337961 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.351701 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:28:34 crc kubenswrapper[4689]: E1013 21:28:34.352184 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerName="cinder-api-log" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.352203 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerName="cinder-api-log" Oct 13 21:28:34 crc kubenswrapper[4689]: E1013 21:28:34.352239 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerName="cinder-api" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.352246 4689 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerName="cinder-api" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.352411 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerName="cinder-api" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.352449 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" containerName="cinder-api-log" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.353395 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.357088 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.357296 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.357174 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.364365 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.427118 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-config-data-custom\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.427158 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.427215 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.429072 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e47a212-9a97-447e-97d9-2686a2937a05-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.429445 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-scripts\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.429474 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-config-data\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc 
kubenswrapper[4689]: I1013 21:28:34.429810 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.430867 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhwgr\" (UniqueName: \"kubernetes.io/projected/2e47a212-9a97-447e-97d9-2686a2937a05-kube-api-access-zhwgr\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.430906 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e47a212-9a97-447e-97d9-2686a2937a05-logs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.435872 4689 scope.go:117] "RemoveContainer" containerID="e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67" Oct 13 21:28:34 crc kubenswrapper[4689]: E1013 21:28:34.437103 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67\": container with ID starting with e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67 not found: ID does not exist" containerID="e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.437155 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67"} err="failed to get container status \"e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67\": rpc error: code = NotFound desc = could not find container \"e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67\": container with ID starting with e407ca308df5d7338f5db906ff946a4e1b20bf7036a0836eecc14ccccb69ed67 not found: ID does not exist" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.437188 4689 scope.go:117] "RemoveContainer" containerID="6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d" Oct 13 21:28:34 crc kubenswrapper[4689]: E1013 21:28:34.437529 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d\": container with ID starting with 6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d not found: ID does not exist" containerID="6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.437554 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d"} err="failed to get container status \"6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d\": rpc error: code = NotFound desc = could not find container \"6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d\": container with ID starting with 6ae38d26a469f37d38dedb509f497b4336e5a05b26f5b28894df278d1d295c4d not 
found: ID does not exist" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.535321 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.536475 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.536671 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.536784 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e47a212-9a97-447e-97d9-2686a2937a05-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.536853 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-scripts\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.536884 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-config-data\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.536939 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.537213 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2e47a212-9a97-447e-97d9-2686a2937a05-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.540098 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhwgr\" (UniqueName: \"kubernetes.io/projected/2e47a212-9a97-447e-97d9-2686a2937a05-kube-api-access-zhwgr\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.540210 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e47a212-9a97-447e-97d9-2686a2937a05-logs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.540295 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-config-data-custom\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.541256 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e47a212-9a97-447e-97d9-2686a2937a05-logs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.542188 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.559327 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-config-data-custom\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.564282 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.565034 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-config-data\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.565322 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-scripts\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.567013 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e47a212-9a97-447e-97d9-2686a2937a05-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.570273 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhwgr\" (UniqueName: \"kubernetes.io/projected/2e47a212-9a97-447e-97d9-2686a2937a05-kube-api-access-zhwgr\") pod \"cinder-api-0\" (UID: \"2e47a212-9a97-447e-97d9-2686a2937a05\") " pod="openstack/cinder-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.661661 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 13 21:28:34 crc kubenswrapper[4689]: I1013 21:28:34.727549 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.280668 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerStarted","Data":"67e0e858f32e4d7063a55054e9cbaa53cffd6595aba45e7d7c09762441105227"}
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.315755 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.728738297 podStartE2EDuration="5.315735341s" podCreationTimestamp="2025-10-13 21:28:30 +0000 UTC" firstStartedPulling="2025-10-13 21:28:31.010802902 +0000 UTC m=+1027.929048017" lastFinishedPulling="2025-10-13 21:28:34.597799976 +0000 UTC m=+1031.516045061" observedRunningTime="2025-10-13 21:28:35.313365076 +0000 UTC m=+1032.231610161" watchObservedRunningTime="2025-10-13 21:28:35.315735341 +0000 UTC m=+1032.233980416"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.333664 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.803640 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.803967 4689 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.864052 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-a47a-account-create-s6fdj"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.864115 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5173-account-create-5rmz2"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.866040 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f42-account-create-jvccr"
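The pod_startup_latency_tracker entry above carries a small piece of arithmetic worth spelling out: podStartE2EDuration is observedRunningTime minus podCreationTimestamp (21:28:35.315735341 − 21:28:30 = 5.315735341s), and podStartSLOduration appears to exclude image-pull time measured on the monotonic clock (the m=+... values): 5.315735341 − (1031.516045061 − 1027.929048017) = 1.728738297s, matching the logged value exactly. A one-line check of that relationship:

package main

import "fmt"

func main() {
	// Values copied from the "Observed pod startup duration" line for ceilometer-0.
	e2e := 5.315735341          // podStartE2EDuration, seconds
	firstPull := 1027.929048017 // firstStartedPulling, monotonic clock (m=+...)
	lastPull := 1031.516045061  // lastFinishedPulling, monotonic clock (m=+...)
	// SLO duration = end-to-end startup minus the time spent pulling images.
	fmt.Printf("podStartSLOduration ≈ %.9f s\n", e2e-(lastPull-firstPull)) // ≈ 1.728738297
}

Compare the cinder-api-0 entry further down, where both pull timestamps are the zero time (0001-01-01): no image was pulled, so its podStartSLOduration equals its podStartE2EDuration (3.329756737s).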
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.913059 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75cc0f0c-aa8d-4333-b83f-d0f740c8308c" path="/var/lib/kubelet/pods/75cc0f0c-aa8d-4333-b83f-d0f740c8308c/volumes"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.931997 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.975654 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ddl4\" (UniqueName: \"kubernetes.io/projected/0d841e40-0d97-4ec7-a0fa-ad896fda51bf-kube-api-access-8ddl4\") pod \"0d841e40-0d97-4ec7-a0fa-ad896fda51bf\" (UID: \"0d841e40-0d97-4ec7-a0fa-ad896fda51bf\") "
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.975765 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ksk2\" (UniqueName: \"kubernetes.io/projected/7680d8a1-9a0d-443b-8ac4-08a8e60cccc5-kube-api-access-9ksk2\") pod \"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5\" (UID: \"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5\") "
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.975822 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pghzq\" (UniqueName: \"kubernetes.io/projected/68719362-13c4-4fca-8437-e68b33609d60-kube-api-access-pghzq\") pod \"68719362-13c4-4fca-8437-e68b33609d60\" (UID: \"68719362-13c4-4fca-8437-e68b33609d60\") "
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.986548 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7680d8a1-9a0d-443b-8ac4-08a8e60cccc5-kube-api-access-9ksk2" (OuterVolumeSpecName: "kube-api-access-9ksk2") pod "7680d8a1-9a0d-443b-8ac4-08a8e60cccc5" (UID: "7680d8a1-9a0d-443b-8ac4-08a8e60cccc5"). InnerVolumeSpecName "kube-api-access-9ksk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.987516 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d841e40-0d97-4ec7-a0fa-ad896fda51bf-kube-api-access-8ddl4" (OuterVolumeSpecName: "kube-api-access-8ddl4") pod "0d841e40-0d97-4ec7-a0fa-ad896fda51bf" (UID: "0d841e40-0d97-4ec7-a0fa-ad896fda51bf"). InnerVolumeSpecName "kube-api-access-8ddl4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:28:35 crc kubenswrapper[4689]: I1013 21:28:35.994846 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68719362-13c4-4fca-8437-e68b33609d60-kube-api-access-pghzq" (OuterVolumeSpecName: "kube-api-access-pghzq") pod "68719362-13c4-4fca-8437-e68b33609d60" (UID: "68719362-13c4-4fca-8437-e68b33609d60"). InnerVolumeSpecName "kube-api-access-pghzq".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.079171 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ddl4\" (UniqueName: \"kubernetes.io/projected/0d841e40-0d97-4ec7-a0fa-ad896fda51bf-kube-api-access-8ddl4\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.079201 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ksk2\" (UniqueName: \"kubernetes.io/projected/7680d8a1-9a0d-443b-8ac4-08a8e60cccc5-kube-api-access-9ksk2\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.079211 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pghzq\" (UniqueName: \"kubernetes.io/projected/68719362-13c4-4fca-8437-e68b33609d60-kube-api-access-pghzq\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.295686 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f42-account-create-jvccr" event={"ID":"0d841e40-0d97-4ec7-a0fa-ad896fda51bf","Type":"ContainerDied","Data":"6cbc6404777335e2ee7629780d223388f96b83b1bbdaefe6d9209d2f4456b474"} Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.295740 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cbc6404777335e2ee7629780d223388f96b83b1bbdaefe6d9209d2f4456b474" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.295902 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f42-account-create-jvccr" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.297288 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2e47a212-9a97-447e-97d9-2686a2937a05","Type":"ContainerStarted","Data":"9e7c00ddd14fbbac6b3e0b23ef492b6ccef14993cf75d61a2c8b140f2c3bc65d"} Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.299061 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5173-account-create-5rmz2" event={"ID":"68719362-13c4-4fca-8437-e68b33609d60","Type":"ContainerDied","Data":"cc28363c9eb78bf81cafa73f4f408a4f78f824bdc2d857a1a8418d4376ee1446"} Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.299127 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc28363c9eb78bf81cafa73f4f408a4f78f824bdc2d857a1a8418d4376ee1446" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.299138 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5173-account-create-5rmz2" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.303434 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-a47a-account-create-s6fdj" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.303709 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-a47a-account-create-s6fdj" event={"ID":"7680d8a1-9a0d-443b-8ac4-08a8e60cccc5","Type":"ContainerDied","Data":"1159631858d7f793fb68f1de6ff99719f9ef8ac658ef4a0bdb893fdf61ac7d6b"} Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.303769 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1159631858d7f793fb68f1de6ff99719f9ef8ac658ef4a0bdb893fdf61ac7d6b" Oct 13 21:28:36 crc kubenswrapper[4689]: I1013 21:28:36.309550 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.310095 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2e47a212-9a97-447e-97d9-2686a2937a05","Type":"ContainerStarted","Data":"03adc5202f97244cfc9046353cad03652b6a710f037da3fbb2251d371745e8d6"} Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.310154 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2e47a212-9a97-447e-97d9-2686a2937a05","Type":"ContainerStarted","Data":"f2ee7df989eb6c0df24487f4608890fa80351b92172e2b818541afb103a72758"} Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.329772 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.329756737 podStartE2EDuration="3.329756737s" podCreationTimestamp="2025-10-13 21:28:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:28:37.328130399 +0000 UTC m=+1034.246375484" watchObservedRunningTime="2025-10-13 21:28:37.329756737 +0000 UTC m=+1034.248001822" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.681769 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cfgv"] Oct 13 21:28:37 crc kubenswrapper[4689]: E1013 21:28:37.682333 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68719362-13c4-4fca-8437-e68b33609d60" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.682365 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="68719362-13c4-4fca-8437-e68b33609d60" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: E1013 21:28:37.682397 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d841e40-0d97-4ec7-a0fa-ad896fda51bf" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.682409 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d841e40-0d97-4ec7-a0fa-ad896fda51bf" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: E1013 21:28:37.682450 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7680d8a1-9a0d-443b-8ac4-08a8e60cccc5" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.682462 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="7680d8a1-9a0d-443b-8ac4-08a8e60cccc5" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.682764 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="68719362-13c4-4fca-8437-e68b33609d60" containerName="mariadb-account-create" Oct 13 21:28:37 
crc kubenswrapper[4689]: I1013 21:28:37.682789 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="7680d8a1-9a0d-443b-8ac4-08a8e60cccc5" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.682810 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d841e40-0d97-4ec7-a0fa-ad896fda51bf" containerName="mariadb-account-create" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.683567 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.688165 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.688308 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-c6sxg" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.688652 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.692660 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cfgv"] Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.810487 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-config-data\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.810816 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.810940 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-scripts\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.811014 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gjw2\" (UniqueName: \"kubernetes.io/projected/7e95bb20-d69c-4378-b16d-11856c0f4fe2-kube-api-access-8gjw2\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.912496 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.912604 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-scripts\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.912638 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gjw2\" (UniqueName: \"kubernetes.io/projected/7e95bb20-d69c-4378-b16d-11856c0f4fe2-kube-api-access-8gjw2\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.912741 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-config-data\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.919178 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-config-data\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.919499 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-scripts\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.921928 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:37 crc kubenswrapper[4689]: I1013 21:28:37.935927 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gjw2\" (UniqueName: \"kubernetes.io/projected/7e95bb20-d69c-4378-b16d-11856c0f4fe2-kube-api-access-8gjw2\") pod \"nova-cell0-conductor-db-sync-6cfgv\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.009315 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.317608 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.489248 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cfgv"] Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.665682 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.666236 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-central-agent" containerID="cri-o://df8ea4d5bf796ae907e5a513e850fd25db9e6feba50248f80ca51f7ed6d8bc6e" gracePeriod=30 Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.666731 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="proxy-httpd" containerID="cri-o://67e0e858f32e4d7063a55054e9cbaa53cffd6595aba45e7d7c09762441105227" gracePeriod=30 Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.666793 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="sg-core" containerID="cri-o://e81bda92c5910dd26b67da394487dc68f240781379550397e6c7f72c0b4ebd1d" gracePeriod=30 Oct 13 21:28:38 crc kubenswrapper[4689]: I1013 21:28:38.666828 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-notification-agent" containerID="cri-o://d54b529e7a346a94ab81648f877aae3b31999dc11d96aae6150a83e58cc4bf0c" gracePeriod=30 Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.325903 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" event={"ID":"7e95bb20-d69c-4378-b16d-11856c0f4fe2","Type":"ContainerStarted","Data":"cb2b2be86cd0a572c92483327528e7c804555370bb0c4557fcd0db97bfbeacc4"} Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.332308 4689 generic.go:334] "Generic (PLEG): container finished" podID="692b35cd-fab7-432a-a19c-046f4684dcef" containerID="67e0e858f32e4d7063a55054e9cbaa53cffd6595aba45e7d7c09762441105227" exitCode=0 Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.332341 4689 generic.go:334] "Generic (PLEG): container finished" podID="692b35cd-fab7-432a-a19c-046f4684dcef" containerID="e81bda92c5910dd26b67da394487dc68f240781379550397e6c7f72c0b4ebd1d" exitCode=2 Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.332350 4689 generic.go:334] "Generic (PLEG): container finished" podID="692b35cd-fab7-432a-a19c-046f4684dcef" containerID="d54b529e7a346a94ab81648f877aae3b31999dc11d96aae6150a83e58cc4bf0c" exitCode=0 Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.332361 4689 generic.go:334] "Generic (PLEG): container finished" podID="692b35cd-fab7-432a-a19c-046f4684dcef" containerID="df8ea4d5bf796ae907e5a513e850fd25db9e6feba50248f80ca51f7ed6d8bc6e" exitCode=0 Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.333261 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerDied","Data":"67e0e858f32e4d7063a55054e9cbaa53cffd6595aba45e7d7c09762441105227"} Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.333297 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerDied","Data":"e81bda92c5910dd26b67da394487dc68f240781379550397e6c7f72c0b4ebd1d"} Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.333309 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerDied","Data":"d54b529e7a346a94ab81648f877aae3b31999dc11d96aae6150a83e58cc4bf0c"} Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.333319 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerDied","Data":"df8ea4d5bf796ae907e5a513e850fd25db9e6feba50248f80ca51f7ed6d8bc6e"} Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.468832 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.541446 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-run-httpd\") pod \"692b35cd-fab7-432a-a19c-046f4684dcef\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.541482 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-scripts\") pod \"692b35cd-fab7-432a-a19c-046f4684dcef\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.541528 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-log-httpd\") pod \"692b35cd-fab7-432a-a19c-046f4684dcef\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.541564 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5zxc\" (UniqueName: \"kubernetes.io/projected/692b35cd-fab7-432a-a19c-046f4684dcef-kube-api-access-d5zxc\") pod \"692b35cd-fab7-432a-a19c-046f4684dcef\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.541636 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-combined-ca-bundle\") pod \"692b35cd-fab7-432a-a19c-046f4684dcef\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.541743 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-sg-core-conf-yaml\") pod \"692b35cd-fab7-432a-a19c-046f4684dcef\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.541780 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-config-data\") pod \"692b35cd-fab7-432a-a19c-046f4684dcef\" (UID: \"692b35cd-fab7-432a-a19c-046f4684dcef\") " Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.542051 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "692b35cd-fab7-432a-a19c-046f4684dcef" (UID: "692b35cd-fab7-432a-a19c-046f4684dcef"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.542882 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "692b35cd-fab7-432a-a19c-046f4684dcef" (UID: "692b35cd-fab7-432a-a19c-046f4684dcef"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.547940 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/692b35cd-fab7-432a-a19c-046f4684dcef-kube-api-access-d5zxc" (OuterVolumeSpecName: "kube-api-access-d5zxc") pod "692b35cd-fab7-432a-a19c-046f4684dcef" (UID: "692b35cd-fab7-432a-a19c-046f4684dcef"). InnerVolumeSpecName "kube-api-access-d5zxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.548995 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-scripts" (OuterVolumeSpecName: "scripts") pod "692b35cd-fab7-432a-a19c-046f4684dcef" (UID: "692b35cd-fab7-432a-a19c-046f4684dcef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.579833 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "692b35cd-fab7-432a-a19c-046f4684dcef" (UID: "692b35cd-fab7-432a-a19c-046f4684dcef"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.621945 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "692b35cd-fab7-432a-a19c-046f4684dcef" (UID: "692b35cd-fab7-432a-a19c-046f4684dcef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.640065 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-config-data" (OuterVolumeSpecName: "config-data") pod "692b35cd-fab7-432a-a19c-046f4684dcef" (UID: "692b35cd-fab7-432a-a19c-046f4684dcef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.643504 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.643540 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.643554 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5zxc\" (UniqueName: \"kubernetes.io/projected/692b35cd-fab7-432a-a19c-046f4684dcef-kube-api-access-d5zxc\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.643568 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.643636 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.643652 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/692b35cd-fab7-432a-a19c-046f4684dcef-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:39 crc kubenswrapper[4689]: I1013 21:28:39.643667 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/692b35cd-fab7-432a-a19c-046f4684dcef-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.347146 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"692b35cd-fab7-432a-a19c-046f4684dcef","Type":"ContainerDied","Data":"348c779d1ed48b3dbb3878ec2fa4cc9524e827ca79762d8dcc66bd3562c11f1f"} Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.347233 4689 scope.go:117] "RemoveContainer" containerID="67e0e858f32e4d7063a55054e9cbaa53cffd6595aba45e7d7c09762441105227" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.347341 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.390452 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.392403 4689 scope.go:117] "RemoveContainer" containerID="e81bda92c5910dd26b67da394487dc68f240781379550397e6c7f72c0b4ebd1d" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.404975 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.414737 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:40 crc kubenswrapper[4689]: E1013 21:28:40.415160 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-notification-agent" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415180 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-notification-agent" Oct 13 21:28:40 crc kubenswrapper[4689]: E1013 21:28:40.415195 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="sg-core" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415201 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="sg-core" Oct 13 21:28:40 crc kubenswrapper[4689]: E1013 21:28:40.415217 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-central-agent" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415225 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-central-agent" Oct 13 21:28:40 crc kubenswrapper[4689]: E1013 21:28:40.415269 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="proxy-httpd" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415275 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="proxy-httpd" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415465 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-central-agent" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415482 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="proxy-httpd" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415490 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="ceilometer-notification-agent" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.415500 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" containerName="sg-core" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.417700 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.420481 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.420840 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.444080 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.458861 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.458924 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-config-data\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.459003 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-run-httpd\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.459060 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-log-httpd\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.459085 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jhfw\" (UniqueName: \"kubernetes.io/projected/33e7dd9e-ae48-4e49-9380-95598d157eec-kube-api-access-7jhfw\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.459108 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-scripts\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.459195 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.560140 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-run-httpd\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.560252 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-log-httpd\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.560275 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jhfw\" (UniqueName: \"kubernetes.io/projected/33e7dd9e-ae48-4e49-9380-95598d157eec-kube-api-access-7jhfw\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.560292 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-scripts\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.560312 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.560376 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.560405 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-config-data\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.561215 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-log-httpd\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.561339 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-run-httpd\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.566273 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-scripts\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.567099 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-config-data\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.567955 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.575091 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.577462 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jhfw\" (UniqueName: \"kubernetes.io/projected/33e7dd9e-ae48-4e49-9380-95598d157eec-kube-api-access-7jhfw\") pod \"ceilometer-0\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.748744 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.777878 4689 scope.go:117] "RemoveContainer" containerID="d54b529e7a346a94ab81648f877aae3b31999dc11d96aae6150a83e58cc4bf0c" Oct 13 21:28:40 crc kubenswrapper[4689]: I1013 21:28:40.866649 4689 scope.go:117] "RemoveContainer" containerID="df8ea4d5bf796ae907e5a513e850fd25db9e6feba50248f80ca51f7ed6d8bc6e" Oct 13 21:28:41 crc kubenswrapper[4689]: I1013 21:28:41.289178 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:41 crc kubenswrapper[4689]: W1013 21:28:41.294813 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33e7dd9e_ae48_4e49_9380_95598d157eec.slice/crio-cef81ae283797dfd5b1097f81254bf59c9b4c185c3b0998b8f42d7c1b2ea87e6 WatchSource:0}: Error finding container cef81ae283797dfd5b1097f81254bf59c9b4c185c3b0998b8f42d7c1b2ea87e6: Status 404 returned error can't find the container with id cef81ae283797dfd5b1097f81254bf59c9b4c185c3b0998b8f42d7c1b2ea87e6 Oct 13 21:28:41 crc kubenswrapper[4689]: I1013 21:28:41.357106 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerStarted","Data":"cef81ae283797dfd5b1097f81254bf59c9b4c185c3b0998b8f42d7c1b2ea87e6"} Oct 13 21:28:41 crc kubenswrapper[4689]: I1013 21:28:41.881817 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="692b35cd-fab7-432a-a19c-046f4684dcef" path="/var/lib/kubelet/pods/692b35cd-fab7-432a-a19c-046f4684dcef/volumes" Oct 13 21:28:46 crc kubenswrapper[4689]: I1013 21:28:46.401620 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerStarted","Data":"cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27"} Oct 13 21:28:46 crc kubenswrapper[4689]: I1013 21:28:46.403777 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" event={"ID":"7e95bb20-d69c-4378-b16d-11856c0f4fe2","Type":"ContainerStarted","Data":"ec57e759577ee041e0c5636c584630a10cd9d3bdaf113824dd617fb3b838297f"} Oct 13 21:28:46 crc kubenswrapper[4689]: I1013 21:28:46.432710 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" podStartSLOduration=2.013367018 
podStartE2EDuration="9.43267688s" podCreationTimestamp="2025-10-13 21:28:37 +0000 UTC" firstStartedPulling="2025-10-13 21:28:38.490787087 +0000 UTC m=+1035.409032172" lastFinishedPulling="2025-10-13 21:28:45.910096959 +0000 UTC m=+1042.828342034" observedRunningTime="2025-10-13 21:28:46.419668103 +0000 UTC m=+1043.337913198" watchObservedRunningTime="2025-10-13 21:28:46.43267688 +0000 UTC m=+1043.350921985" Oct 13 21:28:46 crc kubenswrapper[4689]: I1013 21:28:46.602381 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 13 21:28:47 crc kubenswrapper[4689]: I1013 21:28:47.435074 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerStarted","Data":"9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21"} Oct 13 21:28:48 crc kubenswrapper[4689]: I1013 21:28:48.211203 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:48 crc kubenswrapper[4689]: I1013 21:28:48.444763 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerStarted","Data":"227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147"} Oct 13 21:28:49 crc kubenswrapper[4689]: I1013 21:28:49.454977 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerStarted","Data":"8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1"} Oct 13 21:28:49 crc kubenswrapper[4689]: I1013 21:28:49.455468 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 21:28:49 crc kubenswrapper[4689]: I1013 21:28:49.455204 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="proxy-httpd" containerID="cri-o://8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1" gracePeriod=30 Oct 13 21:28:49 crc kubenswrapper[4689]: I1013 21:28:49.455118 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-central-agent" containerID="cri-o://cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27" gracePeriod=30 Oct 13 21:28:49 crc kubenswrapper[4689]: I1013 21:28:49.455205 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="sg-core" containerID="cri-o://227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147" gracePeriod=30 Oct 13 21:28:49 crc kubenswrapper[4689]: I1013 21:28:49.455293 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-notification-agent" containerID="cri-o://9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21" gracePeriod=30 Oct 13 21:28:49 crc kubenswrapper[4689]: I1013 21:28:49.496372 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.120669213 podStartE2EDuration="9.496333565s" podCreationTimestamp="2025-10-13 21:28:40 +0000 UTC" firstStartedPulling="2025-10-13 21:28:41.296883188 +0000 UTC m=+1038.215128273" 
lastFinishedPulling="2025-10-13 21:28:48.67254754 +0000 UTC m=+1045.590792625" observedRunningTime="2025-10-13 21:28:49.482095688 +0000 UTC m=+1046.400340783" watchObservedRunningTime="2025-10-13 21:28:49.496333565 +0000 UTC m=+1046.414578660" Oct 13 21:28:50 crc kubenswrapper[4689]: I1013 21:28:50.470078 4689 generic.go:334] "Generic (PLEG): container finished" podID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerID="8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1" exitCode=0 Oct 13 21:28:50 crc kubenswrapper[4689]: I1013 21:28:50.470551 4689 generic.go:334] "Generic (PLEG): container finished" podID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerID="227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147" exitCode=2 Oct 13 21:28:50 crc kubenswrapper[4689]: I1013 21:28:50.470170 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerDied","Data":"8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1"} Oct 13 21:28:50 crc kubenswrapper[4689]: I1013 21:28:50.470637 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerDied","Data":"227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147"} Oct 13 21:28:50 crc kubenswrapper[4689]: I1013 21:28:50.470650 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerDied","Data":"9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21"} Oct 13 21:28:50 crc kubenswrapper[4689]: I1013 21:28:50.470568 4689 generic.go:334] "Generic (PLEG): container finished" podID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerID="9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21" exitCode=0 Oct 13 21:28:53 crc kubenswrapper[4689]: I1013 21:28:53.859422 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:28:53 crc kubenswrapper[4689]: I1013 21:28:53.859770 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:28:53 crc kubenswrapper[4689]: I1013 21:28:53.859817 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:28:53 crc kubenswrapper[4689]: I1013 21:28:53.860515 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ab2b61226a649c7705a70b5c1bf03941d31100bc06b01a9ba4b9500ce87dedb9"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:28:53 crc kubenswrapper[4689]: I1013 21:28:53.860560 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" 
containerName="machine-config-daemon" containerID="cri-o://ab2b61226a649c7705a70b5c1bf03941d31100bc06b01a9ba4b9500ce87dedb9" gracePeriod=600 Oct 13 21:28:54 crc kubenswrapper[4689]: I1013 21:28:54.518572 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="ab2b61226a649c7705a70b5c1bf03941d31100bc06b01a9ba4b9500ce87dedb9" exitCode=0 Oct 13 21:28:54 crc kubenswrapper[4689]: I1013 21:28:54.518635 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"ab2b61226a649c7705a70b5c1bf03941d31100bc06b01a9ba4b9500ce87dedb9"} Oct 13 21:28:54 crc kubenswrapper[4689]: I1013 21:28:54.518665 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28"} Oct 13 21:28:54 crc kubenswrapper[4689]: I1013 21:28:54.518686 4689 scope.go:117] "RemoveContainer" containerID="d6e09cc8455c50704247801d05cbebdbf7631acaa5e20cfd3bbcab24f523d8ed" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.310028 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.386512 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-sg-core-conf-yaml\") pod \"33e7dd9e-ae48-4e49-9380-95598d157eec\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.386618 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-log-httpd\") pod \"33e7dd9e-ae48-4e49-9380-95598d157eec\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.386739 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-config-data\") pod \"33e7dd9e-ae48-4e49-9380-95598d157eec\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.386772 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jhfw\" (UniqueName: \"kubernetes.io/projected/33e7dd9e-ae48-4e49-9380-95598d157eec-kube-api-access-7jhfw\") pod \"33e7dd9e-ae48-4e49-9380-95598d157eec\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.386846 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-combined-ca-bundle\") pod \"33e7dd9e-ae48-4e49-9380-95598d157eec\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.386901 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-scripts\") pod \"33e7dd9e-ae48-4e49-9380-95598d157eec\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " Oct 13 21:28:56 crc 
kubenswrapper[4689]: I1013 21:28:56.386947 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-run-httpd\") pod \"33e7dd9e-ae48-4e49-9380-95598d157eec\" (UID: \"33e7dd9e-ae48-4e49-9380-95598d157eec\") " Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.387647 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "33e7dd9e-ae48-4e49-9380-95598d157eec" (UID: "33e7dd9e-ae48-4e49-9380-95598d157eec"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.387827 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "33e7dd9e-ae48-4e49-9380-95598d157eec" (UID: "33e7dd9e-ae48-4e49-9380-95598d157eec"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.392710 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33e7dd9e-ae48-4e49-9380-95598d157eec-kube-api-access-7jhfw" (OuterVolumeSpecName: "kube-api-access-7jhfw") pod "33e7dd9e-ae48-4e49-9380-95598d157eec" (UID: "33e7dd9e-ae48-4e49-9380-95598d157eec"). InnerVolumeSpecName "kube-api-access-7jhfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.399582 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-scripts" (OuterVolumeSpecName: "scripts") pod "33e7dd9e-ae48-4e49-9380-95598d157eec" (UID: "33e7dd9e-ae48-4e49-9380-95598d157eec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.420376 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "33e7dd9e-ae48-4e49-9380-95598d157eec" (UID: "33e7dd9e-ae48-4e49-9380-95598d157eec"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.461636 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33e7dd9e-ae48-4e49-9380-95598d157eec" (UID: "33e7dd9e-ae48-4e49-9380-95598d157eec"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.488418 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.488448 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.488457 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.488467 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.488475 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33e7dd9e-ae48-4e49-9380-95598d157eec-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.488484 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jhfw\" (UniqueName: \"kubernetes.io/projected/33e7dd9e-ae48-4e49-9380-95598d157eec-kube-api-access-7jhfw\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.490762 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-config-data" (OuterVolumeSpecName: "config-data") pod "33e7dd9e-ae48-4e49-9380-95598d157eec" (UID: "33e7dd9e-ae48-4e49-9380-95598d157eec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.536725 4689 generic.go:334] "Generic (PLEG): container finished" podID="7e95bb20-d69c-4378-b16d-11856c0f4fe2" containerID="ec57e759577ee041e0c5636c584630a10cd9d3bdaf113824dd617fb3b838297f" exitCode=0 Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.536813 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" event={"ID":"7e95bb20-d69c-4378-b16d-11856c0f4fe2","Type":"ContainerDied","Data":"ec57e759577ee041e0c5636c584630a10cd9d3bdaf113824dd617fb3b838297f"} Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.539847 4689 generic.go:334] "Generic (PLEG): container finished" podID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerID="cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27" exitCode=0 Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.539880 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerDied","Data":"cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27"} Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.539920 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.539938 4689 scope.go:117] "RemoveContainer" containerID="8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.539921 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"33e7dd9e-ae48-4e49-9380-95598d157eec","Type":"ContainerDied","Data":"cef81ae283797dfd5b1097f81254bf59c9b4c185c3b0998b8f42d7c1b2ea87e6"} Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.562489 4689 scope.go:117] "RemoveContainer" containerID="227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.581200 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.586156 4689 scope.go:117] "RemoveContainer" containerID="9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.593046 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33e7dd9e-ae48-4e49-9380-95598d157eec-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.593114 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.605348 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.605873 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-central-agent" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.605900 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-central-agent" Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.605932 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="proxy-httpd" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.605942 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="proxy-httpd" Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.605963 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-notification-agent" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.605975 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-notification-agent" Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.606010 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="sg-core" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.606018 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="sg-core" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.606236 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-notification-agent" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.606266 4689 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="proxy-httpd" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.606285 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="sg-core" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.606308 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" containerName="ceilometer-central-agent" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.609314 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.611578 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.612055 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.631623 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.636234 4689 scope.go:117] "RemoveContainer" containerID="cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.653221 4689 scope.go:117] "RemoveContainer" containerID="8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1" Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.653626 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1\": container with ID starting with 8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1 not found: ID does not exist" containerID="8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.653662 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1"} err="failed to get container status \"8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1\": rpc error: code = NotFound desc = could not find container \"8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1\": container with ID starting with 8fb2bca6fc7e5951b3ebe8f0fcd41869df4d154a7531076c9b332769cc62d3c1 not found: ID does not exist" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.653687 4689 scope.go:117] "RemoveContainer" containerID="227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147" Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.653975 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147\": container with ID starting with 227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147 not found: ID does not exist" containerID="227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.654028 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147"} err="failed to get container status \"227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147\": rpc error: code = NotFound 
desc = could not find container \"227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147\": container with ID starting with 227be669d1006d1362f2036139725c81231a0e6fcf9f3724fc68623300ef7147 not found: ID does not exist" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.654056 4689 scope.go:117] "RemoveContainer" containerID="9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21" Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.654418 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21\": container with ID starting with 9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21 not found: ID does not exist" containerID="9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.654458 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21"} err="failed to get container status \"9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21\": rpc error: code = NotFound desc = could not find container \"9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21\": container with ID starting with 9808d83d67ad3db86ed2a56e259cf6912bfdcbdebe64e570f2de8c76b9584c21 not found: ID does not exist" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.654480 4689 scope.go:117] "RemoveContainer" containerID="cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27" Oct 13 21:28:56 crc kubenswrapper[4689]: E1013 21:28:56.654725 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27\": container with ID starting with cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27 not found: ID does not exist" containerID="cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.654750 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27"} err="failed to get container status \"cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27\": rpc error: code = NotFound desc = could not find container \"cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27\": container with ID starting with cdfde7913d1209874913814ed26b178e791d7d505b64e2032e030a6ec3324f27 not found: ID does not exist" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.695007 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc82k\" (UniqueName: \"kubernetes.io/projected/4dae00fa-4534-4058-b6c1-d416074d2f7d-kube-api-access-zc82k\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.695143 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-config-data\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.695233 4689 reconciler_common.go:245] 
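[Annotation] The four "DeleteContainer returned error" pairs above are the benign tail of the ceilometer-0 replacement: the kubelet retries RemoveContainer for container IDs the runtime has already garbage-collected, and a NotFound status from the CRI is treated as success. A minimal sketch of that idempotent-cleanup pattern, assuming a toy in-memory runtime (fakeRuntime, errNotFound, and removeIfPresent are illustrative names, not the kubelet's actual code):

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the CRI's NotFound status code.
var errNotFound = errors.New("not found")

// fakeRuntime models the race in the log: the container is already gone.
type fakeRuntime struct{ containers map[string]bool }

func (r *fakeRuntime) containerStatus(id string) error {
	if !r.containers[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	return nil
}

func (r *fakeRuntime) removeContainer(id string) error {
	delete(r.containers, id)
	return nil
}

// removeIfPresent treats NotFound as success, making cleanup idempotent.
func removeIfPresent(r *fakeRuntime, id string) error {
	if err := r.containerStatus(id); err != nil {
		if errors.Is(err, errNotFound) {
			return nil // already garbage-collected; nothing to do
		}
		return err
	}
	return r.removeContainer(id)
}

func main() {
	rt := &fakeRuntime{containers: map[string]bool{}}
	fmt.Println(removeIfPresent(rt, "8fb2bca6fc7e")) // <nil>: NotFound tolerated
}

The same shape explains why the errors are logged at E level but the sync loop proceeds: the post-check only exists to report status, not to gate deletion.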
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-run-httpd\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.695411 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.695508 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.695598 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-scripts\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.695724 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-log-httpd\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.797555 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.797616 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-scripts\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.797677 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-log-httpd\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.797748 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc82k\" (UniqueName: \"kubernetes.io/projected/4dae00fa-4534-4058-b6c1-d416074d2f7d-kube-api-access-zc82k\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.797777 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-config-data\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc 
kubenswrapper[4689]: I1013 21:28:56.797807 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-run-httpd\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.797826 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.799299 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-log-httpd\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.799607 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-run-httpd\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.802651 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.802794 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-scripts\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.808610 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.809425 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-config-data\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.816259 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc82k\" (UniqueName: \"kubernetes.io/projected/4dae00fa-4534-4058-b6c1-d416074d2f7d-kube-api-access-zc82k\") pod \"ceilometer-0\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " pod="openstack/ceilometer-0" Oct 13 21:28:56 crc kubenswrapper[4689]: I1013 21:28:56.933497 4689 util.go:30] "No sandbox for pod can be found. 
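[Annotation] Each of the seven ceilometer-0 volumes above passes through the same two-phase reconcile: VerifyControllerAttachedVolume confirms the desired-state volume is usable on the node, then MountVolume.SetUp materializes it under the pod directory. A toy sketch of that desired-state/actual-state loop (the volume type, desired/actual names, and the print statements are illustrative, not the kubelet's real types):

package main

import "fmt"

// volume mirrors the UniqueName strings in the log, e.g.
// "kubernetes.io/secret/<pod-uid>-config-data".
type volume struct{ uniqueName string }

func main() {
	desired := []volume{ // what the pod spec wants mounted
		{"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-config-data"},
		{"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-run-httpd"},
	}
	actual := map[string]bool{} // what is currently mounted

	// One reconcile pass: verify attachment, then set up anything missing.
	for _, v := range desired {
		if actual[v.uniqueName] {
			continue // already mounted; nothing to do
		}
		fmt.Printf("VerifyControllerAttachedVolume started for %q\n", v.uniqueName)
		fmt.Printf("MountVolume.SetUp succeeded for %q\n", v.uniqueName)
		actual[v.uniqueName] = true
	}
}

Because the loop is level-triggered, a repeated pass over an already-mounted volume is a no-op, which is why the same volume names can reappear later in the log without side effects.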
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.397821 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.550718 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerStarted","Data":"cf6e5d925a0de9c1d7712e84f9bd80736b3b80c607148e8db491bbfa40d51e4f"} Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.866485 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.878849 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33e7dd9e-ae48-4e49-9380-95598d157eec" path="/var/lib/kubelet/pods/33e7dd9e-ae48-4e49-9380-95598d157eec/volumes" Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.945390 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-combined-ca-bundle\") pod \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.945719 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gjw2\" (UniqueName: \"kubernetes.io/projected/7e95bb20-d69c-4378-b16d-11856c0f4fe2-kube-api-access-8gjw2\") pod \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.945803 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-config-data\") pod \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.945837 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-scripts\") pod \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\" (UID: \"7e95bb20-d69c-4378-b16d-11856c0f4fe2\") " Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.951299 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e95bb20-d69c-4378-b16d-11856c0f4fe2-kube-api-access-8gjw2" (OuterVolumeSpecName: "kube-api-access-8gjw2") pod "7e95bb20-d69c-4378-b16d-11856c0f4fe2" (UID: "7e95bb20-d69c-4378-b16d-11856c0f4fe2"). InnerVolumeSpecName "kube-api-access-8gjw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.951649 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-scripts" (OuterVolumeSpecName: "scripts") pod "7e95bb20-d69c-4378-b16d-11856c0f4fe2" (UID: "7e95bb20-d69c-4378-b16d-11856c0f4fe2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.980241 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-config-data" (OuterVolumeSpecName: "config-data") pod "7e95bb20-d69c-4378-b16d-11856c0f4fe2" (UID: "7e95bb20-d69c-4378-b16d-11856c0f4fe2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:57 crc kubenswrapper[4689]: I1013 21:28:57.980721 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e95bb20-d69c-4378-b16d-11856c0f4fe2" (UID: "7e95bb20-d69c-4378-b16d-11856c0f4fe2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.047653 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.047683 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.047692 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e95bb20-d69c-4378-b16d-11856c0f4fe2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.047704 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gjw2\" (UniqueName: \"kubernetes.io/projected/7e95bb20-d69c-4378-b16d-11856c0f4fe2-kube-api-access-8gjw2\") on node \"crc\" DevicePath \"\"" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.562811 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.564439 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6cfgv" event={"ID":"7e95bb20-d69c-4378-b16d-11856c0f4fe2","Type":"ContainerDied","Data":"cb2b2be86cd0a572c92483327528e7c804555370bb0c4557fcd0db97bfbeacc4"} Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.564478 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb2b2be86cd0a572c92483327528e7c804555370bb0c4557fcd0db97bfbeacc4" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.566199 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerStarted","Data":"f28ca62d92901de8d9af61c5eaafce83b62a3c82837fdae01b7acdb9feedd249"} Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.779670 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 13 21:28:58 crc kubenswrapper[4689]: E1013 21:28:58.780063 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e95bb20-d69c-4378-b16d-11856c0f4fe2" containerName="nova-cell0-conductor-db-sync" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.780085 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e95bb20-d69c-4378-b16d-11856c0f4fe2" containerName="nova-cell0-conductor-db-sync" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.780268 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e95bb20-d69c-4378-b16d-11856c0f4fe2" containerName="nova-cell0-conductor-db-sync" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.780996 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.788247 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-c6sxg" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.788554 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.824656 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.862656 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4d915e-c42b-4389-bd38-49fc12bc950a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.862807 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4d915e-c42b-4389-bd38-49fc12bc950a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.862861 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mdg2\" (UniqueName: \"kubernetes.io/projected/3d4d915e-c42b-4389-bd38-49fc12bc950a-kube-api-access-8mdg2\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.964374 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4d915e-c42b-4389-bd38-49fc12bc950a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.964455 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mdg2\" (UniqueName: \"kubernetes.io/projected/3d4d915e-c42b-4389-bd38-49fc12bc950a-kube-api-access-8mdg2\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.964503 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4d915e-c42b-4389-bd38-49fc12bc950a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.970342 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d4d915e-c42b-4389-bd38-49fc12bc950a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.979346 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mdg2\" (UniqueName: \"kubernetes.io/projected/3d4d915e-c42b-4389-bd38-49fc12bc950a-kube-api-access-8mdg2\") pod 
\"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:58 crc kubenswrapper[4689]: I1013 21:28:58.993751 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d4d915e-c42b-4389-bd38-49fc12bc950a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3d4d915e-c42b-4389-bd38-49fc12bc950a\") " pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:59 crc kubenswrapper[4689]: I1013 21:28:59.103280 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 13 21:28:59 crc kubenswrapper[4689]: I1013 21:28:59.538918 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 13 21:28:59 crc kubenswrapper[4689]: I1013 21:28:59.576851 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d4d915e-c42b-4389-bd38-49fc12bc950a","Type":"ContainerStarted","Data":"d06da60aeac23261874dee7c7709aaf95ff1d3b8c869e390ff21f7db6bfc7dc0"} Oct 13 21:29:00 crc kubenswrapper[4689]: I1013 21:29:00.587150 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"3d4d915e-c42b-4389-bd38-49fc12bc950a","Type":"ContainerStarted","Data":"a2ab43a823220fa667ad24d147ea59942d91da7def72f1c41a9f8b3c4cc24ebf"} Oct 13 21:29:00 crc kubenswrapper[4689]: I1013 21:29:00.588296 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 13 21:29:00 crc kubenswrapper[4689]: I1013 21:29:00.589495 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerStarted","Data":"5ee776dda9f8a7ce96d07013693695d95da474a740a1eff403e7428993679417"} Oct 13 21:29:00 crc kubenswrapper[4689]: I1013 21:29:00.608412 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.608396839 podStartE2EDuration="2.608396839s" podCreationTimestamp="2025-10-13 21:28:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:00.60294419 +0000 UTC m=+1057.521189275" watchObservedRunningTime="2025-10-13 21:29:00.608396839 +0000 UTC m=+1057.526641924" Oct 13 21:29:01 crc kubenswrapper[4689]: I1013 21:29:01.603905 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerStarted","Data":"45872f5f9957ff921395d4ab988b88c19d90a7e32db53069f3f69cea0835799d"} Oct 13 21:29:02 crc kubenswrapper[4689]: I1013 21:29:02.614041 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerStarted","Data":"3a1ba0b926a1ff2b613f5207f9a23c0897c7452f92c897102c42881acc308de5"} Oct 13 21:29:02 crc kubenswrapper[4689]: I1013 21:29:02.615713 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 21:29:02 crc kubenswrapper[4689]: I1013 21:29:02.638410 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.47820768 podStartE2EDuration="6.6383877s" podCreationTimestamp="2025-10-13 21:28:56 +0000 UTC" firstStartedPulling="2025-10-13 
21:28:57.408035546 +0000 UTC m=+1054.326280641" lastFinishedPulling="2025-10-13 21:29:01.568215556 +0000 UTC m=+1058.486460661" observedRunningTime="2025-10-13 21:29:02.635540693 +0000 UTC m=+1059.553785768" watchObservedRunningTime="2025-10-13 21:29:02.6383877 +0000 UTC m=+1059.556632785" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.131255 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.608481 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-5qvqz"] Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.609662 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.611368 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.612117 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.622792 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-5qvqz"] Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.691950 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-config-data\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.692019 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.692247 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvk66\" (UniqueName: \"kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.692287 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-scripts\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.792737 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.794563 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvk66\" (UniqueName: \"kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:04 crc kubenswrapper[4689]: 
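[Annotation] The two pod_startup_latency_tracker entries are worth decoding: podStartE2EDuration is the observed running time minus podCreationTimestamp, and podStartSLOduration additionally excludes the image-pull window (firstStartedPulling to lastFinishedPulling), which is why nova-cell0-conductor-0 (no pull, zero-value pull timestamps) has identical values while ceilometer-0 does not. Reproducing ceilometer-0's numbers from the fields above (a sketch; the layout string matches how Go prints these timestamps):

package main

import (
	"fmt"
	"time"
)

// layout matches timestamps like "2025-10-13 21:29:02.6383877 +0000 UTC".
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-10-13 21:28:56 +0000 UTC")
	firstPull := mustParse("2025-10-13 21:28:57.408035546 +0000 UTC")
	lastPull := mustParse("2025-10-13 21:29:01.568215556 +0000 UTC")
	observed := mustParse("2025-10-13 21:29:02.6383877 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // pull window excluded
	fmt.Println(e2e, slo)                // 6.6383877s 2.47820769s
}

The computed values match the logged podStartE2EDuration="6.6383877s" and podStartSLOduration=2.47820768 up to rounding.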
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.131255 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.608481 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-5qvqz"]
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.609662 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.611368 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.612117 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.622792 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-5qvqz"]
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.691950 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-config-data\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.692019 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.692247 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvk66\" (UniqueName: \"kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.692287 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-scripts\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.792737 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.794563 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvk66\" (UniqueName: \"kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.794613 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-scripts\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.794658 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.794669 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-config-data\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.794686 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.801627 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.802327 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-config-data\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.808323 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.818003 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-scripts\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.824286 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.851317 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvk66\" (UniqueName: \"kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66\") pod \"nova-cell0-cell-mapping-5qvqz\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.896742 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-config-data\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.897171 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvzqs\" (UniqueName: \"kubernetes.io/projected/9074694f-45ca-42e9-b596-ddffa2726b6c-kube-api-access-tvzqs\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.897279 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.947744 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.949287 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.953832 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.963790 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.991612 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5qvqz"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.998985 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvzqs\" (UniqueName: \"kubernetes.io/projected/9074694f-45ca-42e9-b596-ddffa2726b6c-kube-api-access-tvzqs\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.999167 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.999279 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.999446 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97smg\" (UniqueName: \"kubernetes.io/projected/02def983-8ec2-47ee-bc06-cb08d42795f4-kube-api-access-97smg\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.999543 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-config-data\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.999792 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-config-data\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0"
Oct 13 21:29:04 crc kubenswrapper[4689]: I1013 21:29:04.999913 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02def983-8ec2-47ee-bc06-cb08d42795f4-logs\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0"
Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.011791 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.020397 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.021341 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-config-data\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0"
Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.021959 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.043503 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.082628 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.084854 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvzqs\" (UniqueName: \"kubernetes.io/projected/9074694f-45ca-42e9-b596-ddffa2726b6c-kube-api-access-tvzqs\") pod \"nova-scheduler-0\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.087145 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.096533 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.107390 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97smg\" (UniqueName: \"kubernetes.io/projected/02def983-8ec2-47ee-bc06-cb08d42795f4-kube-api-access-97smg\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.109801 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-config-data\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.110338 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-config-data\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.110543 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9www\" (UniqueName: \"kubernetes.io/projected/8e23c768-d1a5-4087-bb92-6497004bda51-kube-api-access-x9www\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.110722 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02def983-8ec2-47ee-bc06-cb08d42795f4-logs\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.110976 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.111099 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e23c768-d1a5-4087-bb92-6497004bda51-logs\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.111346 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.116849 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02def983-8ec2-47ee-bc06-cb08d42795f4-logs\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.130364 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.131031 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-config-data\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.151901 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.174150 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97smg\" (UniqueName: \"kubernetes.io/projected/02def983-8ec2-47ee-bc06-cb08d42795f4-kube-api-access-97smg\") pod \"nova-api-0\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.209293 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-2rg4h"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.211176 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.217846 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-2rg4h"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.218812 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.218915 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9www\" (UniqueName: \"kubernetes.io/projected/8e23c768-d1a5-4087-bb92-6497004bda51-kube-api-access-x9www\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.219016 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.219062 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e23c768-d1a5-4087-bb92-6497004bda51-logs\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.219118 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cql5h\" (UniqueName: \"kubernetes.io/projected/762ff29c-7685-452a-bd06-561632aed1d9-kube-api-access-cql5h\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.219150 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.219199 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-config-data\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.219704 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e23c768-d1a5-4087-bb92-6497004bda51-logs\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.224580 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " 
pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.227195 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-config-data\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.236045 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.244057 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9www\" (UniqueName: \"kubernetes.io/projected/8e23c768-d1a5-4087-bb92-6497004bda51-kube-api-access-x9www\") pod \"nova-metadata-0\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.280238 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320437 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320482 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-config\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320512 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-svc\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320542 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52p4w\" (UniqueName: \"kubernetes.io/projected/1a79fbe5-a692-405a-88f9-4bd7f011ecad-kube-api-access-52p4w\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320628 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320660 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320684 4689 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cql5h\" (UniqueName: \"kubernetes.io/projected/762ff29c-7685-452a-bd06-561632aed1d9-kube-api-access-cql5h\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320727 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.320780 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.329972 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.332394 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.344352 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cql5h\" (UniqueName: \"kubernetes.io/projected/762ff29c-7685-452a-bd06-561632aed1d9-kube-api-access-cql5h\") pod \"nova-cell1-novncproxy-0\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.427182 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-config\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.427236 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-svc\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.427260 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52p4w\" (UniqueName: \"kubernetes.io/projected/1a79fbe5-a692-405a-88f9-4bd7f011ecad-kube-api-access-52p4w\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.427330 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.427365 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.427411 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.428295 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-config\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.428297 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.428825 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.428872 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-svc\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.429369 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.445380 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52p4w\" (UniqueName: \"kubernetes.io/projected/1a79fbe5-a692-405a-88f9-4bd7f011ecad-kube-api-access-52p4w\") pod \"dnsmasq-dns-757b4f8459-2rg4h\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") " pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.509684 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.535238 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.538417 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.543046 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.659706 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9074694f-45ca-42e9-b596-ddffa2726b6c","Type":"ContainerStarted","Data":"fc7c35b3ffb6f5fea2f70d364be18140f9f418796983bc470406a13c4ac4c682"} Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.809820 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-5qvqz"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.921682 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pbwb5"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.923205 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.934528 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.934887 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.935228 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 13 21:29:05 crc kubenswrapper[4689]: I1013 21:29:05.956119 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pbwb5"] Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.049270 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-scripts\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.049510 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.049790 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fjks\" (UniqueName: \"kubernetes.io/projected/9590d1d7-97f4-426a-a0e0-8f4d941489bc-kube-api-access-6fjks\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.049851 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-config-data\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.082807 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:06 crc kubenswrapper[4689]: W1013 21:29:06.094464 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e23c768_d1a5_4087_bb92_6497004bda51.slice/crio-a3b1ea12c26d4b44cdc87b6dfdc42c7c0fd3d364f08d206f0aa0ea76d2b1239b WatchSource:0}: Error finding container a3b1ea12c26d4b44cdc87b6dfdc42c7c0fd3d364f08d206f0aa0ea76d2b1239b: Status 404 returned error can't find the container with id a3b1ea12c26d4b44cdc87b6dfdc42c7c0fd3d364f08d206f0aa0ea76d2b1239b Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.151944 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fjks\" (UniqueName: \"kubernetes.io/projected/9590d1d7-97f4-426a-a0e0-8f4d941489bc-kube-api-access-6fjks\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.152014 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-config-data\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.152097 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-scripts\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.152183 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.157998 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-scripts\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.169543 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.171221 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-config-data\") pod 
\"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.171479 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fjks\" (UniqueName: \"kubernetes.io/projected/9590d1d7-97f4-426a-a0e0-8f4d941489bc-kube-api-access-6fjks\") pod \"nova-cell1-conductor-db-sync-pbwb5\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.181371 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.290384 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-2rg4h"] Oct 13 21:29:06 crc kubenswrapper[4689]: W1013 21:29:06.304382 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a79fbe5_a692_405a_88f9_4bd7f011ecad.slice/crio-21f192a80fffb125339e7127cf728d99f5193479b80c4e01cea0c58b878570f2 WatchSource:0}: Error finding container 21f192a80fffb125339e7127cf728d99f5193479b80c4e01cea0c58b878570f2: Status 404 returned error can't find the container with id 21f192a80fffb125339e7127cf728d99f5193479b80c4e01cea0c58b878570f2 Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.352791 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.675204 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"762ff29c-7685-452a-bd06-561632aed1d9","Type":"ContainerStarted","Data":"ab65a88a21ab41915d9195e8d168fb13a5e8e89b0ba8f7a1b02233edd03e9d3a"} Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.677229 4689 generic.go:334] "Generic (PLEG): container finished" podID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerID="b17dfa5b7a6d7615fb94ba281ccf0be06bdf1178e23d7b87523006e66c620b73" exitCode=0 Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.677289 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" event={"ID":"1a79fbe5-a692-405a-88f9-4bd7f011ecad","Type":"ContainerDied","Data":"b17dfa5b7a6d7615fb94ba281ccf0be06bdf1178e23d7b87523006e66c620b73"} Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.677311 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" event={"ID":"1a79fbe5-a692-405a-88f9-4bd7f011ecad","Type":"ContainerStarted","Data":"21f192a80fffb125339e7127cf728d99f5193479b80c4e01cea0c58b878570f2"} Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.679787 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8e23c768-d1a5-4087-bb92-6497004bda51","Type":"ContainerStarted","Data":"a3b1ea12c26d4b44cdc87b6dfdc42c7c0fd3d364f08d206f0aa0ea76d2b1239b"} Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.684862 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-5qvqz" event={"ID":"2d94648e-97ea-49c5-a6f4-46b96f12ef74","Type":"ContainerStarted","Data":"0e775227d4a03e315be12d2f6903db3dd4dfd3953bdc834df6f36e1f186887e9"} Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.684916 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-cell-mapping-5qvqz" event={"ID":"2d94648e-97ea-49c5-a6f4-46b96f12ef74","Type":"ContainerStarted","Data":"071a6e7e38b81495bd755b1c16333e46bd8783aa10957b1b99786f9eeecdce2b"} Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.688007 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"02def983-8ec2-47ee-bc06-cb08d42795f4","Type":"ContainerStarted","Data":"df69fa4f5c6aee46927bdfeef172568535af0ac7373fea216969c568cc695601"} Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.725807 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-5qvqz" podStartSLOduration=2.725788844 podStartE2EDuration="2.725788844s" podCreationTimestamp="2025-10-13 21:29:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:06.720122141 +0000 UTC m=+1063.638367226" watchObservedRunningTime="2025-10-13 21:29:06.725788844 +0000 UTC m=+1063.644033929" Oct 13 21:29:06 crc kubenswrapper[4689]: I1013 21:29:06.829266 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pbwb5"] Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.717122 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" event={"ID":"1a79fbe5-a692-405a-88f9-4bd7f011ecad","Type":"ContainerStarted","Data":"342be3253311bfd48d306c06adf2ce883aa8b9d82a07e52601825bfe9e933b74"} Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.717367 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.720738 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" event={"ID":"9590d1d7-97f4-426a-a0e0-8f4d941489bc","Type":"ContainerStarted","Data":"12c3681e6d7a018825098f3a96af711970788d717db880da78bfd946d311acee"} Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.720772 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" event={"ID":"9590d1d7-97f4-426a-a0e0-8f4d941489bc","Type":"ContainerStarted","Data":"f15c02ceb12cbf477834249ede5953c3774ab84b95185cecd82a5e1a4d681770"} Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.723927 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9074694f-45ca-42e9-b596-ddffa2726b6c","Type":"ContainerStarted","Data":"ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a"} Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.742732 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" podStartSLOduration=2.742714421 podStartE2EDuration="2.742714421s" podCreationTimestamp="2025-10-13 21:29:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:07.740712854 +0000 UTC m=+1064.658957949" watchObservedRunningTime="2025-10-13 21:29:07.742714421 +0000 UTC m=+1064.660959506" Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.777615 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.0674624169999998 podStartE2EDuration="3.777578544s" podCreationTimestamp="2025-10-13 21:29:04 +0000 UTC" 
firstStartedPulling="2025-10-13 21:29:05.552979576 +0000 UTC m=+1062.471224661" lastFinishedPulling="2025-10-13 21:29:07.263095703 +0000 UTC m=+1064.181340788" observedRunningTime="2025-10-13 21:29:07.756992278 +0000 UTC m=+1064.675237363" watchObservedRunningTime="2025-10-13 21:29:07.777578544 +0000 UTC m=+1064.695823639" Oct 13 21:29:07 crc kubenswrapper[4689]: I1013 21:29:07.778789 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" podStartSLOduration=2.778783573 podStartE2EDuration="2.778783573s" podCreationTimestamp="2025-10-13 21:29:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:07.772121895 +0000 UTC m=+1064.690366980" watchObservedRunningTime="2025-10-13 21:29:07.778783573 +0000 UTC m=+1064.697028648" Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.029656 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.039176 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.746432 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"762ff29c-7685-452a-bd06-561632aed1d9","Type":"ContainerStarted","Data":"f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5"} Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.746498 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="762ff29c-7685-452a-bd06-561632aed1d9" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5" gracePeriod=30 Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.748734 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"02def983-8ec2-47ee-bc06-cb08d42795f4","Type":"ContainerStarted","Data":"17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74"} Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.748783 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"02def983-8ec2-47ee-bc06-cb08d42795f4","Type":"ContainerStarted","Data":"713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262"} Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.752289 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8e23c768-d1a5-4087-bb92-6497004bda51","Type":"ContainerStarted","Data":"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c"} Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.752343 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8e23c768-d1a5-4087-bb92-6497004bda51","Type":"ContainerStarted","Data":"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19"} Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.752367 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-log" containerID="cri-o://f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19" gracePeriod=30 Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.752419 4689 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/nova-metadata-0" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-metadata" containerID="cri-o://ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c" gracePeriod=30 Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.764009 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.195849026 podStartE2EDuration="5.763990438s" podCreationTimestamp="2025-10-13 21:29:04 +0000 UTC" firstStartedPulling="2025-10-13 21:29:06.186723063 +0000 UTC m=+1063.104968148" lastFinishedPulling="2025-10-13 21:29:08.754864475 +0000 UTC m=+1065.673109560" observedRunningTime="2025-10-13 21:29:09.762964393 +0000 UTC m=+1066.681209478" watchObservedRunningTime="2025-10-13 21:29:09.763990438 +0000 UTC m=+1066.682235523" Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.788164 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.96353046 podStartE2EDuration="5.788144998s" podCreationTimestamp="2025-10-13 21:29:04 +0000 UTC" firstStartedPulling="2025-10-13 21:29:05.936466443 +0000 UTC m=+1062.854711528" lastFinishedPulling="2025-10-13 21:29:08.761080981 +0000 UTC m=+1065.679326066" observedRunningTime="2025-10-13 21:29:09.781255875 +0000 UTC m=+1066.699500960" watchObservedRunningTime="2025-10-13 21:29:09.788144998 +0000 UTC m=+1066.706390083" Oct 13 21:29:09 crc kubenswrapper[4689]: I1013 21:29:09.810967 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.147894032 podStartE2EDuration="5.810943936s" podCreationTimestamp="2025-10-13 21:29:04 +0000 UTC" firstStartedPulling="2025-10-13 21:29:06.098271434 +0000 UTC m=+1063.016516519" lastFinishedPulling="2025-10-13 21:29:08.761321348 +0000 UTC m=+1065.679566423" observedRunningTime="2025-10-13 21:29:09.798261337 +0000 UTC m=+1066.716506422" watchObservedRunningTime="2025-10-13 21:29:09.810943936 +0000 UTC m=+1066.729189021" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.237119 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.427373 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.536088 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.537353 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9www\" (UniqueName: \"kubernetes.io/projected/8e23c768-d1a5-4087-bb92-6497004bda51-kube-api-access-x9www\") pod \"8e23c768-d1a5-4087-bb92-6497004bda51\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.537546 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e23c768-d1a5-4087-bb92-6497004bda51-logs\") pod \"8e23c768-d1a5-4087-bb92-6497004bda51\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.537803 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-config-data\") pod \"8e23c768-d1a5-4087-bb92-6497004bda51\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.537933 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e23c768-d1a5-4087-bb92-6497004bda51-logs" (OuterVolumeSpecName: "logs") pod "8e23c768-d1a5-4087-bb92-6497004bda51" (UID: "8e23c768-d1a5-4087-bb92-6497004bda51"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.537956 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-combined-ca-bundle\") pod \"8e23c768-d1a5-4087-bb92-6497004bda51\" (UID: \"8e23c768-d1a5-4087-bb92-6497004bda51\") " Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.538650 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e23c768-d1a5-4087-bb92-6497004bda51-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.543327 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e23c768-d1a5-4087-bb92-6497004bda51-kube-api-access-x9www" (OuterVolumeSpecName: "kube-api-access-x9www") pod "8e23c768-d1a5-4087-bb92-6497004bda51" (UID: "8e23c768-d1a5-4087-bb92-6497004bda51"). InnerVolumeSpecName "kube-api-access-x9www". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.564383 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e23c768-d1a5-4087-bb92-6497004bda51" (UID: "8e23c768-d1a5-4087-bb92-6497004bda51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.566925 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-config-data" (OuterVolumeSpecName: "config-data") pod "8e23c768-d1a5-4087-bb92-6497004bda51" (UID: "8e23c768-d1a5-4087-bb92-6497004bda51"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.640689 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.640718 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e23c768-d1a5-4087-bb92-6497004bda51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.640731 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9www\" (UniqueName: \"kubernetes.io/projected/8e23c768-d1a5-4087-bb92-6497004bda51-kube-api-access-x9www\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.765564 4689 generic.go:334] "Generic (PLEG): container finished" podID="8e23c768-d1a5-4087-bb92-6497004bda51" containerID="ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c" exitCode=0 Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.765645 4689 generic.go:334] "Generic (PLEG): container finished" podID="8e23c768-d1a5-4087-bb92-6497004bda51" containerID="f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19" exitCode=143 Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.765659 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.765634 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8e23c768-d1a5-4087-bb92-6497004bda51","Type":"ContainerDied","Data":"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c"} Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.766705 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8e23c768-d1a5-4087-bb92-6497004bda51","Type":"ContainerDied","Data":"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19"} Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.766832 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8e23c768-d1a5-4087-bb92-6497004bda51","Type":"ContainerDied","Data":"a3b1ea12c26d4b44cdc87b6dfdc42c7c0fd3d364f08d206f0aa0ea76d2b1239b"} Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.766743 4689 scope.go:117] "RemoveContainer" containerID="ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.791735 4689 scope.go:117] "RemoveContainer" containerID="f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.821633 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.835077 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.840030 4689 scope.go:117] "RemoveContainer" containerID="ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c" Oct 13 21:29:10 crc kubenswrapper[4689]: E1013 21:29:10.840501 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c\": container with ID starting with ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c not found: ID does not exist" containerID="ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.840542 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c"} err="failed to get container status \"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c\": rpc error: code = NotFound desc = could not find container \"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c\": container with ID starting with ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c not found: ID does not exist" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.840566 4689 scope.go:117] "RemoveContainer" containerID="f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19" Oct 13 21:29:10 crc kubenswrapper[4689]: E1013 21:29:10.841122 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19\": container with ID starting with f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19 not found: ID does not exist" containerID="f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.841143 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19"} err="failed to get container status \"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19\": rpc error: code = NotFound desc = could not find container \"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19\": container with ID starting with f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19 not found: ID does not exist" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.841158 4689 scope.go:117] "RemoveContainer" containerID="ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.841453 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c"} err="failed to get container status \"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c\": rpc error: code = NotFound desc = could not find container \"ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c\": container with ID starting with ccf155b01a5f01538971a2ee04c7f0605f0dd51a4213917decda8e9a7192c47c not found: ID does not exist" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.841472 4689 scope.go:117] "RemoveContainer" containerID="f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.841826 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19"} err="failed to get container status \"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19\": rpc error: code = NotFound desc = could not find container \"f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19\": container with ID starting with 
f16ea3e5cacd0b4ea6fdb1b2968cd71ed1969c6a687d1b44f7118a0b52c6cb19 not found: ID does not exist" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.854892 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:10 crc kubenswrapper[4689]: E1013 21:29:10.855490 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-metadata" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.855519 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-metadata" Oct 13 21:29:10 crc kubenswrapper[4689]: E1013 21:29:10.855555 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-log" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.855566 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-log" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.855984 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-log" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.856031 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" containerName="nova-metadata-metadata" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.857709 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.862335 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.862734 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.862747 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.945820 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.946426 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.946470 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-config-data\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.946545 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2s42\" (UniqueName: 
\"kubernetes.io/projected/8a35e003-9930-4dfd-b078-e017b90f1090-kube-api-access-l2s42\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:10 crc kubenswrapper[4689]: I1013 21:29:10.946573 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a35e003-9930-4dfd-b078-e017b90f1090-logs\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.048058 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2s42\" (UniqueName: \"kubernetes.io/projected/8a35e003-9930-4dfd-b078-e017b90f1090-kube-api-access-l2s42\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.048134 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a35e003-9930-4dfd-b078-e017b90f1090-logs\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.048187 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.048319 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.048357 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-config-data\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.048528 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a35e003-9930-4dfd-b078-e017b90f1090-logs\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.053727 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.054138 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.063291 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-config-data\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.064976 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2s42\" (UniqueName: \"kubernetes.io/projected/8a35e003-9930-4dfd-b078-e017b90f1090-kube-api-access-l2s42\") pod \"nova-metadata-0\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.205986 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.637121 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:11 crc kubenswrapper[4689]: W1013 21:29:11.645918 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a35e003_9930_4dfd_b078_e017b90f1090.slice/crio-c0f74e72e939b35e25366e1198dd22ce366f5b9fd9d32f131d5f9c177be5a509 WatchSource:0}: Error finding container c0f74e72e939b35e25366e1198dd22ce366f5b9fd9d32f131d5f9c177be5a509: Status 404 returned error can't find the container with id c0f74e72e939b35e25366e1198dd22ce366f5b9fd9d32f131d5f9c177be5a509 Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.778679 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8a35e003-9930-4dfd-b078-e017b90f1090","Type":"ContainerStarted","Data":"c0f74e72e939b35e25366e1198dd22ce366f5b9fd9d32f131d5f9c177be5a509"} Oct 13 21:29:11 crc kubenswrapper[4689]: I1013 21:29:11.877246 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e23c768-d1a5-4087-bb92-6497004bda51" path="/var/lib/kubelet/pods/8e23c768-d1a5-4087-bb92-6497004bda51/volumes" Oct 13 21:29:12 crc kubenswrapper[4689]: I1013 21:29:12.791846 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8a35e003-9930-4dfd-b078-e017b90f1090","Type":"ContainerStarted","Data":"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388"} Oct 13 21:29:12 crc kubenswrapper[4689]: I1013 21:29:12.792172 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8a35e003-9930-4dfd-b078-e017b90f1090","Type":"ContainerStarted","Data":"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3"} Oct 13 21:29:12 crc kubenswrapper[4689]: I1013 21:29:12.819127 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.81910401 podStartE2EDuration="2.81910401s" podCreationTimestamp="2025-10-13 21:29:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:12.811731805 +0000 UTC m=+1069.729976910" watchObservedRunningTime="2025-10-13 21:29:12.81910401 +0000 UTC m=+1069.737349105" Oct 13 21:29:13 crc kubenswrapper[4689]: I1013 21:29:13.802725 4689 generic.go:334] "Generic (PLEG): container finished" podID="9590d1d7-97f4-426a-a0e0-8f4d941489bc" containerID="12c3681e6d7a018825098f3a96af711970788d717db880da78bfd946d311acee" exitCode=0 Oct 13 21:29:13 crc kubenswrapper[4689]: I1013 21:29:13.802785 4689 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" event={"ID":"9590d1d7-97f4-426a-a0e0-8f4d941489bc","Type":"ContainerDied","Data":"12c3681e6d7a018825098f3a96af711970788d717db880da78bfd946d311acee"} Oct 13 21:29:13 crc kubenswrapper[4689]: I1013 21:29:13.805742 4689 generic.go:334] "Generic (PLEG): container finished" podID="2d94648e-97ea-49c5-a6f4-46b96f12ef74" containerID="0e775227d4a03e315be12d2f6903db3dd4dfd3953bdc834df6f36e1f186887e9" exitCode=0 Oct 13 21:29:13 crc kubenswrapper[4689]: I1013 21:29:13.805782 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-5qvqz" event={"ID":"2d94648e-97ea-49c5-a6f4-46b96f12ef74","Type":"ContainerDied","Data":"0e775227d4a03e315be12d2f6903db3dd4dfd3953bdc834df6f36e1f186887e9"} Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.236563 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.262936 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.266223 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.269803 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.281107 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.281143 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389108 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fjks\" (UniqueName: \"kubernetes.io/projected/9590d1d7-97f4-426a-a0e0-8f4d941489bc-kube-api-access-6fjks\") pod \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389153 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-scripts\") pod \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389194 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-config-data\") pod \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389237 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvk66\" (UniqueName: \"kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66\") pod \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389257 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-combined-ca-bundle\") pod 
\"2d94648e-97ea-49c5-a6f4-46b96f12ef74\" (UID: \"2d94648e-97ea-49c5-a6f4-46b96f12ef74\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389292 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-combined-ca-bundle\") pod \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389318 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-config-data\") pod \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.389347 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-scripts\") pod \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\" (UID: \"9590d1d7-97f4-426a-a0e0-8f4d941489bc\") " Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.395465 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-scripts" (OuterVolumeSpecName: "scripts") pod "9590d1d7-97f4-426a-a0e0-8f4d941489bc" (UID: "9590d1d7-97f4-426a-a0e0-8f4d941489bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.395609 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9590d1d7-97f4-426a-a0e0-8f4d941489bc-kube-api-access-6fjks" (OuterVolumeSpecName: "kube-api-access-6fjks") pod "9590d1d7-97f4-426a-a0e0-8f4d941489bc" (UID: "9590d1d7-97f4-426a-a0e0-8f4d941489bc"). InnerVolumeSpecName "kube-api-access-6fjks". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.408630 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-scripts" (OuterVolumeSpecName: "scripts") pod "2d94648e-97ea-49c5-a6f4-46b96f12ef74" (UID: "2d94648e-97ea-49c5-a6f4-46b96f12ef74"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.414645 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66" (OuterVolumeSpecName: "kube-api-access-nvk66") pod "2d94648e-97ea-49c5-a6f4-46b96f12ef74" (UID: "2d94648e-97ea-49c5-a6f4-46b96f12ef74"). InnerVolumeSpecName "kube-api-access-nvk66". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.423313 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-config-data" (OuterVolumeSpecName: "config-data") pod "9590d1d7-97f4-426a-a0e0-8f4d941489bc" (UID: "9590d1d7-97f4-426a-a0e0-8f4d941489bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.423415 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d94648e-97ea-49c5-a6f4-46b96f12ef74" (UID: "2d94648e-97ea-49c5-a6f4-46b96f12ef74"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.425086 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-config-data" (OuterVolumeSpecName: "config-data") pod "2d94648e-97ea-49c5-a6f4-46b96f12ef74" (UID: "2d94648e-97ea-49c5-a6f4-46b96f12ef74"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.432766 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9590d1d7-97f4-426a-a0e0-8f4d941489bc" (UID: "9590d1d7-97f4-426a-a0e0-8f4d941489bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.491914 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.491953 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvk66\" (UniqueName: \"kubernetes.io/projected/2d94648e-97ea-49c5-a6f4-46b96f12ef74-kube-api-access-nvk66\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.491965 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.491975 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.491985 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.491993 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9590d1d7-97f4-426a-a0e0-8f4d941489bc-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.492004 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fjks\" (UniqueName: \"kubernetes.io/projected/9590d1d7-97f4-426a-a0e0-8f4d941489bc-kube-api-access-6fjks\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.492012 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d94648e-97ea-49c5-a6f4-46b96f12ef74-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 
21:29:15.544755 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.605796 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8q7f6"] Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.606031 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" podUID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerName="dnsmasq-dns" containerID="cri-o://ea88e9a0d54143830376b8346458f966c5ed103cccc1c6e0959c286f551529b7" gracePeriod=10 Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.846970 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.847499 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pbwb5" event={"ID":"9590d1d7-97f4-426a-a0e0-8f4d941489bc","Type":"ContainerDied","Data":"f15c02ceb12cbf477834249ede5953c3774ab84b95185cecd82a5e1a4d681770"} Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.847546 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f15c02ceb12cbf477834249ede5953c3774ab84b95185cecd82a5e1a4d681770" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.859418 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-5qvqz" event={"ID":"2d94648e-97ea-49c5-a6f4-46b96f12ef74","Type":"ContainerDied","Data":"071a6e7e38b81495bd755b1c16333e46bd8783aa10957b1b99786f9eeecdce2b"} Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.859470 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="071a6e7e38b81495bd755b1c16333e46bd8783aa10957b1b99786f9eeecdce2b" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.859726 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-5qvqz" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.868823 4689 generic.go:334] "Generic (PLEG): container finished" podID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerID="ea88e9a0d54143830376b8346458f966c5ed103cccc1c6e0959c286f551529b7" exitCode=0 Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.888392 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" event={"ID":"776abc4d-58f9-4028-ae6a-deeddad2a105","Type":"ContainerDied","Data":"ea88e9a0d54143830376b8346458f966c5ed103cccc1c6e0959c286f551529b7"} Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.912442 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.926663 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 13 21:29:15 crc kubenswrapper[4689]: E1013 21:29:15.927154 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d94648e-97ea-49c5-a6f4-46b96f12ef74" containerName="nova-manage" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.927175 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d94648e-97ea-49c5-a6f4-46b96f12ef74" containerName="nova-manage" Oct 13 21:29:15 crc kubenswrapper[4689]: E1013 21:29:15.927206 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9590d1d7-97f4-426a-a0e0-8f4d941489bc" containerName="nova-cell1-conductor-db-sync" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.927213 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9590d1d7-97f4-426a-a0e0-8f4d941489bc" containerName="nova-cell1-conductor-db-sync" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.927416 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9590d1d7-97f4-426a-a0e0-8f4d941489bc" containerName="nova-cell1-conductor-db-sync" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.927450 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d94648e-97ea-49c5-a6f4-46b96f12ef74" containerName="nova-manage" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.928139 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.932943 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 13 21:29:15 crc kubenswrapper[4689]: I1013 21:29:15.938105 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.007939 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/573c1817-260c-43b0-a892-f393e2d4ba07-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.008224 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24vx9\" (UniqueName: \"kubernetes.io/projected/573c1817-260c-43b0-a892-f393e2d4ba07-kube-api-access-24vx9\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.008343 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573c1817-260c-43b0-a892-f393e2d4ba07-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.042956 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.043176 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-log" containerID="cri-o://713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262" gracePeriod=30 Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.043358 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-api" containerID="cri-o://17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74" gracePeriod=30 Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.052029 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": EOF" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.052044 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": EOF" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.100322 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.111036 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24vx9\" (UniqueName: \"kubernetes.io/projected/573c1817-260c-43b0-a892-f393e2d4ba07-kube-api-access-24vx9\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.119665 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.119912 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-log" containerID="cri-o://cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3" gracePeriod=30 Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.120068 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-metadata" containerID="cri-o://f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388" gracePeriod=30 Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.136224 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573c1817-260c-43b0-a892-f393e2d4ba07-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.137301 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573c1817-260c-43b0-a892-f393e2d4ba07-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.137569 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/573c1817-260c-43b0-a892-f393e2d4ba07-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.151691 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/573c1817-260c-43b0-a892-f393e2d4ba07-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.155902 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24vx9\" (UniqueName: \"kubernetes.io/projected/573c1817-260c-43b0-a892-f393e2d4ba07-kube-api-access-24vx9\") pod \"nova-cell1-conductor-0\" (UID: \"573c1817-260c-43b0-a892-f393e2d4ba07\") " pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.209211 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.209276 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 21:29:16 crc 
kubenswrapper[4689]: I1013 21:29:16.239487 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-nb\") pod \"776abc4d-58f9-4028-ae6a-deeddad2a105\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.239643 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-config\") pod \"776abc4d-58f9-4028-ae6a-deeddad2a105\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.239857 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-svc\") pod \"776abc4d-58f9-4028-ae6a-deeddad2a105\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.239955 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5krlr\" (UniqueName: \"kubernetes.io/projected/776abc4d-58f9-4028-ae6a-deeddad2a105-kube-api-access-5krlr\") pod \"776abc4d-58f9-4028-ae6a-deeddad2a105\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.239987 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-swift-storage-0\") pod \"776abc4d-58f9-4028-ae6a-deeddad2a105\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.240041 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-sb\") pod \"776abc4d-58f9-4028-ae6a-deeddad2a105\" (UID: \"776abc4d-58f9-4028-ae6a-deeddad2a105\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.246204 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/776abc4d-58f9-4028-ae6a-deeddad2a105-kube-api-access-5krlr" (OuterVolumeSpecName: "kube-api-access-5krlr") pod "776abc4d-58f9-4028-ae6a-deeddad2a105" (UID: "776abc4d-58f9-4028-ae6a-deeddad2a105"). InnerVolumeSpecName "kube-api-access-5krlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.250828 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.316947 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "776abc4d-58f9-4028-ae6a-deeddad2a105" (UID: "776abc4d-58f9-4028-ae6a-deeddad2a105"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.341140 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "776abc4d-58f9-4028-ae6a-deeddad2a105" (UID: "776abc4d-58f9-4028-ae6a-deeddad2a105"). 
InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.342274 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.342292 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5krlr\" (UniqueName: \"kubernetes.io/projected/776abc4d-58f9-4028-ae6a-deeddad2a105-kube-api-access-5krlr\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.342302 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.353212 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "776abc4d-58f9-4028-ae6a-deeddad2a105" (UID: "776abc4d-58f9-4028-ae6a-deeddad2a105"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.364111 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-config" (OuterVolumeSpecName: "config") pod "776abc4d-58f9-4028-ae6a-deeddad2a105" (UID: "776abc4d-58f9-4028-ae6a-deeddad2a105"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.381121 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "776abc4d-58f9-4028-ae6a-deeddad2a105" (UID: "776abc4d-58f9-4028-ae6a-deeddad2a105"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.444557 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.444584 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.444604 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/776abc4d-58f9-4028-ae6a-deeddad2a105-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.531982 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.754532 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.819230 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.877624 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"02def983-8ec2-47ee-bc06-cb08d42795f4","Type":"ContainerDied","Data":"713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262"} Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.877626 4689 generic.go:334] "Generic (PLEG): container finished" podID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerID="713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262" exitCode=143 Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.879178 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" event={"ID":"776abc4d-58f9-4028-ae6a-deeddad2a105","Type":"ContainerDied","Data":"5b6e55647b82db3f4e3c76bd0311918f605b2e933eb78fcbe1d13ecbc30af45b"} Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.879213 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-8q7f6" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.879248 4689 scope.go:117] "RemoveContainer" containerID="ea88e9a0d54143830376b8346458f966c5ed103cccc1c6e0959c286f551529b7" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.883376 4689 generic.go:334] "Generic (PLEG): container finished" podID="8a35e003-9930-4dfd-b078-e017b90f1090" containerID="f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388" exitCode=0 Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.883403 4689 generic.go:334] "Generic (PLEG): container finished" podID="8a35e003-9930-4dfd-b078-e017b90f1090" containerID="cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3" exitCode=143 Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.883442 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8a35e003-9930-4dfd-b078-e017b90f1090","Type":"ContainerDied","Data":"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388"} Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.883468 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8a35e003-9930-4dfd-b078-e017b90f1090","Type":"ContainerDied","Data":"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3"} Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.883478 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8a35e003-9930-4dfd-b078-e017b90f1090","Type":"ContainerDied","Data":"c0f74e72e939b35e25366e1198dd22ce366f5b9fd9d32f131d5f9c177be5a509"} Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.883531 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.885887 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"573c1817-260c-43b0-a892-f393e2d4ba07","Type":"ContainerStarted","Data":"40904df3590a39f0d7794c32c342e17337300dbcb59a88673b1d31a89e9a0fc8"} Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.905282 4689 scope.go:117] "RemoveContainer" containerID="c2b801a2882e90b44a263d807b9b70a7969920664fb97b229b1ccc0076266c69" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.935899 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8q7f6"] Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.946010 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-8q7f6"] Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.946968 4689 scope.go:117] "RemoveContainer" containerID="f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.964080 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-nova-metadata-tls-certs\") pod \"8a35e003-9930-4dfd-b078-e017b90f1090\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.964133 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a35e003-9930-4dfd-b078-e017b90f1090-logs\") pod \"8a35e003-9930-4dfd-b078-e017b90f1090\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.964247 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2s42\" (UniqueName: \"kubernetes.io/projected/8a35e003-9930-4dfd-b078-e017b90f1090-kube-api-access-l2s42\") pod \"8a35e003-9930-4dfd-b078-e017b90f1090\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.964307 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-config-data\") pod \"8a35e003-9930-4dfd-b078-e017b90f1090\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.964347 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-combined-ca-bundle\") pod \"8a35e003-9930-4dfd-b078-e017b90f1090\" (UID: \"8a35e003-9930-4dfd-b078-e017b90f1090\") " Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.966029 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a35e003-9930-4dfd-b078-e017b90f1090-logs" (OuterVolumeSpecName: "logs") pod "8a35e003-9930-4dfd-b078-e017b90f1090" (UID: "8a35e003-9930-4dfd-b078-e017b90f1090"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.967736 4689 scope.go:117] "RemoveContainer" containerID="cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.967875 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a35e003-9930-4dfd-b078-e017b90f1090-kube-api-access-l2s42" (OuterVolumeSpecName: "kube-api-access-l2s42") pod "8a35e003-9930-4dfd-b078-e017b90f1090" (UID: "8a35e003-9930-4dfd-b078-e017b90f1090"). InnerVolumeSpecName "kube-api-access-l2s42". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.989350 4689 scope.go:117] "RemoveContainer" containerID="f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388" Oct 13 21:29:16 crc kubenswrapper[4689]: E1013 21:29:16.990087 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388\": container with ID starting with f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388 not found: ID does not exist" containerID="f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.990131 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388"} err="failed to get container status \"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388\": rpc error: code = NotFound desc = could not find container \"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388\": container with ID starting with f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388 not found: ID does not exist" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.990154 4689 scope.go:117] "RemoveContainer" containerID="cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3" Oct 13 21:29:16 crc kubenswrapper[4689]: E1013 21:29:16.991791 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3\": container with ID starting with cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3 not found: ID does not exist" containerID="cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.991830 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3"} err="failed to get container status \"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3\": rpc error: code = NotFound desc = could not find container \"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3\": container with ID starting with cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3 not found: ID does not exist" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.991851 4689 scope.go:117] "RemoveContainer" containerID="f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.991870 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-config-data" (OuterVolumeSpecName: "config-data") pod "8a35e003-9930-4dfd-b078-e017b90f1090" (UID: "8a35e003-9930-4dfd-b078-e017b90f1090"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.992285 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388"} err="failed to get container status \"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388\": rpc error: code = NotFound desc = could not find container \"f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388\": container with ID starting with f27f3fcadb3c7c178073743b12757576dfd25ffc7d5dde84bef618f1f82ea388 not found: ID does not exist" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.992314 4689 scope.go:117] "RemoveContainer" containerID="cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3" Oct 13 21:29:16 crc kubenswrapper[4689]: I1013 21:29:16.992606 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3"} err="failed to get container status \"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3\": rpc error: code = NotFound desc = could not find container \"cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3\": container with ID starting with cc1a066d62426f78ea933073efa76e82e597302778977545f22be1049ce2ede3 not found: ID does not exist" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.007832 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a35e003-9930-4dfd-b078-e017b90f1090" (UID: "8a35e003-9930-4dfd-b078-e017b90f1090"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.014756 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "8a35e003-9930-4dfd-b078-e017b90f1090" (UID: "8a35e003-9930-4dfd-b078-e017b90f1090"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.069095 4689 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.069255 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a35e003-9930-4dfd-b078-e017b90f1090-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.069434 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2s42\" (UniqueName: \"kubernetes.io/projected/8a35e003-9930-4dfd-b078-e017b90f1090-kube-api-access-l2s42\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.069473 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.069485 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a35e003-9930-4dfd-b078-e017b90f1090-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.223236 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.244006 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.255327 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:17 crc kubenswrapper[4689]: E1013 21:29:17.255742 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-log" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.255759 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-log" Oct 13 21:29:17 crc kubenswrapper[4689]: E1013 21:29:17.255787 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-metadata" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.255794 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-metadata" Oct 13 21:29:17 crc kubenswrapper[4689]: E1013 21:29:17.255808 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerName="init" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.255814 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerName="init" Oct 13 21:29:17 crc kubenswrapper[4689]: E1013 21:29:17.255827 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerName="dnsmasq-dns" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.255832 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerName="dnsmasq-dns" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.256009 4689 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="776abc4d-58f9-4028-ae6a-deeddad2a105" containerName="dnsmasq-dns" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.256034 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-log" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.256045 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" containerName="nova-metadata-metadata" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.257022 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.259711 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.259996 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.264901 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.374410 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.374451 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.374499 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-config-data\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.374686 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352d4de6-8c1e-462f-a7f6-6c4f2955707d-logs\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.374742 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmt67\" (UniqueName: \"kubernetes.io/projected/352d4de6-8c1e-462f-a7f6-6c4f2955707d-kube-api-access-pmt67\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.476886 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.476938 4689 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.476980 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-config-data\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.477113 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352d4de6-8c1e-462f-a7f6-6c4f2955707d-logs\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.477148 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmt67\" (UniqueName: \"kubernetes.io/projected/352d4de6-8c1e-462f-a7f6-6c4f2955707d-kube-api-access-pmt67\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.477607 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352d4de6-8c1e-462f-a7f6-6c4f2955707d-logs\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.481014 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.481101 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.481315 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-config-data\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.496022 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmt67\" (UniqueName: \"kubernetes.io/projected/352d4de6-8c1e-462f-a7f6-6c4f2955707d-kube-api-access-pmt67\") pod \"nova-metadata-0\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.597566 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.893514 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="776abc4d-58f9-4028-ae6a-deeddad2a105" path="/var/lib/kubelet/pods/776abc4d-58f9-4028-ae6a-deeddad2a105/volumes" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.895190 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a35e003-9930-4dfd-b078-e017b90f1090" path="/var/lib/kubelet/pods/8a35e003-9930-4dfd-b078-e017b90f1090/volumes" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.909383 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"573c1817-260c-43b0-a892-f393e2d4ba07","Type":"ContainerStarted","Data":"9536b81d9ace5e17b2547ab800e1db37571450f6eb43337404e99a0d9ab7fcbe"} Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.909459 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.911557 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="9074694f-45ca-42e9-b596-ddffa2726b6c" containerName="nova-scheduler-scheduler" containerID="cri-o://ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a" gracePeriod=30 Oct 13 21:29:17 crc kubenswrapper[4689]: I1013 21:29:17.929066 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.929051571 podStartE2EDuration="2.929051571s" podCreationTimestamp="2025-10-13 21:29:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:17.919972067 +0000 UTC m=+1074.838217152" watchObservedRunningTime="2025-10-13 21:29:17.929051571 +0000 UTC m=+1074.847296656" Oct 13 21:29:18 crc kubenswrapper[4689]: I1013 21:29:18.025253 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:29:18 crc kubenswrapper[4689]: W1013 21:29:18.026947 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod352d4de6_8c1e_462f_a7f6_6c4f2955707d.slice/crio-aac86f0b8a0556f23d73d606b6f626b6935e8a4e6ea25c11dae4de5ec526e92d WatchSource:0}: Error finding container aac86f0b8a0556f23d73d606b6f626b6935e8a4e6ea25c11dae4de5ec526e92d: Status 404 returned error can't find the container with id aac86f0b8a0556f23d73d606b6f626b6935e8a4e6ea25c11dae4de5ec526e92d Oct 13 21:29:18 crc kubenswrapper[4689]: I1013 21:29:18.921812 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"352d4de6-8c1e-462f-a7f6-6c4f2955707d","Type":"ContainerStarted","Data":"e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544"} Oct 13 21:29:18 crc kubenswrapper[4689]: I1013 21:29:18.922191 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"352d4de6-8c1e-462f-a7f6-6c4f2955707d","Type":"ContainerStarted","Data":"51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6"} Oct 13 21:29:18 crc kubenswrapper[4689]: I1013 21:29:18.922204 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"352d4de6-8c1e-462f-a7f6-6c4f2955707d","Type":"ContainerStarted","Data":"aac86f0b8a0556f23d73d606b6f626b6935e8a4e6ea25c11dae4de5ec526e92d"} Oct 13 21:29:18 crc kubenswrapper[4689]: I1013 21:29:18.941900 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.9418818020000002 podStartE2EDuration="1.941881802s" podCreationTimestamp="2025-10-13 21:29:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:18.936264979 +0000 UTC m=+1075.854510054" watchObservedRunningTime="2025-10-13 21:29:18.941881802 +0000 UTC m=+1075.860126887" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.493207 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.619616 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvzqs\" (UniqueName: \"kubernetes.io/projected/9074694f-45ca-42e9-b596-ddffa2726b6c-kube-api-access-tvzqs\") pod \"9074694f-45ca-42e9-b596-ddffa2726b6c\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.619743 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-config-data\") pod \"9074694f-45ca-42e9-b596-ddffa2726b6c\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.619885 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-combined-ca-bundle\") pod \"9074694f-45ca-42e9-b596-ddffa2726b6c\" (UID: \"9074694f-45ca-42e9-b596-ddffa2726b6c\") " Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.633807 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9074694f-45ca-42e9-b596-ddffa2726b6c-kube-api-access-tvzqs" (OuterVolumeSpecName: "kube-api-access-tvzqs") pod "9074694f-45ca-42e9-b596-ddffa2726b6c" (UID: "9074694f-45ca-42e9-b596-ddffa2726b6c"). InnerVolumeSpecName "kube-api-access-tvzqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.646857 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-config-data" (OuterVolumeSpecName: "config-data") pod "9074694f-45ca-42e9-b596-ddffa2726b6c" (UID: "9074694f-45ca-42e9-b596-ddffa2726b6c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.647881 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9074694f-45ca-42e9-b596-ddffa2726b6c" (UID: "9074694f-45ca-42e9-b596-ddffa2726b6c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.723175 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvzqs\" (UniqueName: \"kubernetes.io/projected/9074694f-45ca-42e9-b596-ddffa2726b6c-kube-api-access-tvzqs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.723206 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.723216 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9074694f-45ca-42e9-b596-ddffa2726b6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.931565 4689 generic.go:334] "Generic (PLEG): container finished" podID="9074694f-45ca-42e9-b596-ddffa2726b6c" containerID="ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a" exitCode=0 Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.931783 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9074694f-45ca-42e9-b596-ddffa2726b6c","Type":"ContainerDied","Data":"ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a"} Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.932001 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9074694f-45ca-42e9-b596-ddffa2726b6c","Type":"ContainerDied","Data":"fc7c35b3ffb6f5fea2f70d364be18140f9f418796983bc470406a13c4ac4c682"} Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.932024 4689 scope.go:117] "RemoveContainer" containerID="ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.931849 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.956092 4689 scope.go:117] "RemoveContainer" containerID="ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a" Oct 13 21:29:19 crc kubenswrapper[4689]: E1013 21:29:19.956656 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a\": container with ID starting with ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a not found: ID does not exist" containerID="ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.956711 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a"} err="failed to get container status \"ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a\": rpc error: code = NotFound desc = could not find container \"ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a\": container with ID starting with ed7a88820f58c120a6ec6ef8616e9a897f37528108a9e98f1cefca4dc913b85a not found: ID does not exist" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.959419 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.975383 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.985380 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:19 crc kubenswrapper[4689]: E1013 21:29:19.986012 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9074694f-45ca-42e9-b596-ddffa2726b6c" containerName="nova-scheduler-scheduler" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.986111 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9074694f-45ca-42e9-b596-ddffa2726b6c" containerName="nova-scheduler-scheduler" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.986443 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9074694f-45ca-42e9-b596-ddffa2726b6c" containerName="nova-scheduler-scheduler" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.987181 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.990113 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 13 21:29:19 crc kubenswrapper[4689]: I1013 21:29:19.994083 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.129345 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.129442 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cszz\" (UniqueName: \"kubernetes.io/projected/67d3cb68-db98-4b94-91d1-bc032bf032bb-kube-api-access-2cszz\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.129486 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-config-data\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.231409 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.231555 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cszz\" (UniqueName: \"kubernetes.io/projected/67d3cb68-db98-4b94-91d1-bc032bf032bb-kube-api-access-2cszz\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.231626 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-config-data\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.235771 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.237423 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-config-data\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.246920 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cszz\" (UniqueName: 
\"kubernetes.io/projected/67d3cb68-db98-4b94-91d1-bc032bf032bb-kube-api-access-2cszz\") pod \"nova-scheduler-0\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.315124 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.758496 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.941987 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"67d3cb68-db98-4b94-91d1-bc032bf032bb","Type":"ContainerStarted","Data":"12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d"} Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.942265 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"67d3cb68-db98-4b94-91d1-bc032bf032bb","Type":"ContainerStarted","Data":"eed87d3020e8e59a6887753c25ebec3c0647e655279d94ec401601ce308865f8"} Oct 13 21:29:20 crc kubenswrapper[4689]: I1013 21:29:20.965813 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.96579712 podStartE2EDuration="1.96579712s" podCreationTimestamp="2025-10-13 21:29:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:20.961753285 +0000 UTC m=+1077.879998370" watchObservedRunningTime="2025-10-13 21:29:20.96579712 +0000 UTC m=+1077.884042205" Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.290395 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.879195 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9074694f-45ca-42e9-b596-ddffa2726b6c" path="/var/lib/kubelet/pods/9074694f-45ca-42e9-b596-ddffa2726b6c/volumes" Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.928240 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.962631 4689 generic.go:334] "Generic (PLEG): container finished" podID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerID="17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74" exitCode=0 Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.962718 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.962789 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"02def983-8ec2-47ee-bc06-cb08d42795f4","Type":"ContainerDied","Data":"17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74"} Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.962838 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"02def983-8ec2-47ee-bc06-cb08d42795f4","Type":"ContainerDied","Data":"df69fa4f5c6aee46927bdfeef172568535af0ac7373fea216969c568cc695601"} Oct 13 21:29:21 crc kubenswrapper[4689]: I1013 21:29:21.962880 4689 scope.go:117] "RemoveContainer" containerID="17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.005026 4689 scope.go:117] "RemoveContainer" containerID="713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.026387 4689 scope.go:117] "RemoveContainer" containerID="17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74" Oct 13 21:29:22 crc kubenswrapper[4689]: E1013 21:29:22.026839 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74\": container with ID starting with 17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74 not found: ID does not exist" containerID="17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.026887 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74"} err="failed to get container status \"17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74\": rpc error: code = NotFound desc = could not find container \"17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74\": container with ID starting with 17facf47ddccdd7d73c1a0f2bf0ee59d47cfa9cb3c635ca30b29a4d5e273ce74 not found: ID does not exist" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.026916 4689 scope.go:117] "RemoveContainer" containerID="713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262" Oct 13 21:29:22 crc kubenswrapper[4689]: E1013 21:29:22.027173 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262\": container with ID starting with 713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262 not found: ID does not exist" containerID="713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.027202 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262"} err="failed to get container status \"713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262\": rpc error: code = NotFound desc = could not find container \"713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262\": container with ID starting with 713f2431d705500d1a34d131da3f444cd725600ea364c7ce4a26f5f04a6ec262 not found: ID does not exist" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.069378 4689 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97smg\" (UniqueName: \"kubernetes.io/projected/02def983-8ec2-47ee-bc06-cb08d42795f4-kube-api-access-97smg\") pod \"02def983-8ec2-47ee-bc06-cb08d42795f4\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.069439 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02def983-8ec2-47ee-bc06-cb08d42795f4-logs\") pod \"02def983-8ec2-47ee-bc06-cb08d42795f4\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.069478 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-combined-ca-bundle\") pod \"02def983-8ec2-47ee-bc06-cb08d42795f4\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.069532 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-config-data\") pod \"02def983-8ec2-47ee-bc06-cb08d42795f4\" (UID: \"02def983-8ec2-47ee-bc06-cb08d42795f4\") " Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.071256 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02def983-8ec2-47ee-bc06-cb08d42795f4-logs" (OuterVolumeSpecName: "logs") pod "02def983-8ec2-47ee-bc06-cb08d42795f4" (UID: "02def983-8ec2-47ee-bc06-cb08d42795f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.074880 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02def983-8ec2-47ee-bc06-cb08d42795f4-kube-api-access-97smg" (OuterVolumeSpecName: "kube-api-access-97smg") pod "02def983-8ec2-47ee-bc06-cb08d42795f4" (UID: "02def983-8ec2-47ee-bc06-cb08d42795f4"). InnerVolumeSpecName "kube-api-access-97smg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.098761 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-config-data" (OuterVolumeSpecName: "config-data") pod "02def983-8ec2-47ee-bc06-cb08d42795f4" (UID: "02def983-8ec2-47ee-bc06-cb08d42795f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.101679 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02def983-8ec2-47ee-bc06-cb08d42795f4" (UID: "02def983-8ec2-47ee-bc06-cb08d42795f4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.172601 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97smg\" (UniqueName: \"kubernetes.io/projected/02def983-8ec2-47ee-bc06-cb08d42795f4-kube-api-access-97smg\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.172642 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02def983-8ec2-47ee-bc06-cb08d42795f4-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.172659 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.172672 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02def983-8ec2-47ee-bc06-cb08d42795f4-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.325324 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.334889 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.344857 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:22 crc kubenswrapper[4689]: E1013 21:29:22.345224 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-api" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.345243 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-api" Oct 13 21:29:22 crc kubenswrapper[4689]: E1013 21:29:22.345263 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-log" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.345269 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-log" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.345469 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-api" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.345501 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" containerName="nova-api-log" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.346508 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.352393 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.359866 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.488845 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2cvv\" (UniqueName: \"kubernetes.io/projected/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-kube-api-access-r2cvv\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.488906 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-config-data\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.488929 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.488973 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-logs\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.591106 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-config-data\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.591716 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.591817 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-logs\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.592039 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2cvv\" (UniqueName: \"kubernetes.io/projected/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-kube-api-access-r2cvv\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.593827 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-logs\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " 
pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.597114 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.598660 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.598724 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.604181 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-config-data\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.608741 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2cvv\" (UniqueName: \"kubernetes.io/projected/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-kube-api-access-r2cvv\") pod \"nova-api-0\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " pod="openstack/nova-api-0" Oct 13 21:29:22 crc kubenswrapper[4689]: I1013 21:29:22.716121 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:23 crc kubenswrapper[4689]: I1013 21:29:23.185578 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:23 crc kubenswrapper[4689]: I1013 21:29:23.882452 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02def983-8ec2-47ee-bc06-cb08d42795f4" path="/var/lib/kubelet/pods/02def983-8ec2-47ee-bc06-cb08d42795f4/volumes" Oct 13 21:29:23 crc kubenswrapper[4689]: I1013 21:29:23.983658 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c","Type":"ContainerStarted","Data":"55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3"} Oct 13 21:29:23 crc kubenswrapper[4689]: I1013 21:29:23.983711 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c","Type":"ContainerStarted","Data":"73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e"} Oct 13 21:29:23 crc kubenswrapper[4689]: I1013 21:29:23.983725 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c","Type":"ContainerStarted","Data":"25b8106ec24acabe9621ab351615f57d455a9307e7dd96f54c5b3219bf8146de"} Oct 13 21:29:24 crc kubenswrapper[4689]: I1013 21:29:24.005307 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.005283304 podStartE2EDuration="2.005283304s" podCreationTimestamp="2025-10-13 21:29:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:23.999960028 +0000 UTC m=+1080.918205113" watchObservedRunningTime="2025-10-13 21:29:24.005283304 +0000 UTC m=+1080.923528389" Oct 13 21:29:25 crc kubenswrapper[4689]: I1013 21:29:25.315238 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Oct 13 21:29:26 crc kubenswrapper[4689]: I1013 21:29:26.940938 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 13 21:29:27 crc kubenswrapper[4689]: I1013 21:29:27.598831 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 13 21:29:27 crc kubenswrapper[4689]: I1013 21:29:27.598890 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 13 21:29:28 crc kubenswrapper[4689]: I1013 21:29:28.612857 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 13 21:29:28 crc kubenswrapper[4689]: I1013 21:29:28.612856 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 13 21:29:30 crc kubenswrapper[4689]: I1013 21:29:30.316227 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 13 21:29:30 crc kubenswrapper[4689]: I1013 21:29:30.345057 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 13 21:29:30 crc kubenswrapper[4689]: I1013 21:29:30.450563 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 21:29:30 crc kubenswrapper[4689]: I1013 21:29:30.451130 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="bbf31bb3-2ca0-4223-9b65-fb4081267ea9" containerName="kube-state-metrics" containerID="cri-o://902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570" gracePeriod=30 Oct 13 21:29:30 crc kubenswrapper[4689]: I1013 21:29:30.911773 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.038711 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddcx2\" (UniqueName: \"kubernetes.io/projected/bbf31bb3-2ca0-4223-9b65-fb4081267ea9-kube-api-access-ddcx2\") pod \"bbf31bb3-2ca0-4223-9b65-fb4081267ea9\" (UID: \"bbf31bb3-2ca0-4223-9b65-fb4081267ea9\") " Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.045573 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbf31bb3-2ca0-4223-9b65-fb4081267ea9-kube-api-access-ddcx2" (OuterVolumeSpecName: "kube-api-access-ddcx2") pod "bbf31bb3-2ca0-4223-9b65-fb4081267ea9" (UID: "bbf31bb3-2ca0-4223-9b65-fb4081267ea9"). InnerVolumeSpecName "kube-api-access-ddcx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.059271 4689 generic.go:334] "Generic (PLEG): container finished" podID="bbf31bb3-2ca0-4223-9b65-fb4081267ea9" containerID="902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570" exitCode=2 Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.059333 4689 util.go:48] "No ready sandbox for pod can be found. 
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.059323 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bbf31bb3-2ca0-4223-9b65-fb4081267ea9","Type":"ContainerDied","Data":"902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570"}
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.059389 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bbf31bb3-2ca0-4223-9b65-fb4081267ea9","Type":"ContainerDied","Data":"c8f3f8ad8811c84d9716c350e364cc43686325556da97917bee893fb75ccf1d9"}
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.059419 4689 scope.go:117] "RemoveContainer" containerID="902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570"
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.097289 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.115264 4689 scope.go:117] "RemoveContainer" containerID="902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570"
Oct 13 21:29:31 crc kubenswrapper[4689]: E1013 21:29:31.115815 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570\": container with ID starting with 902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570 not found: ID does not exist" containerID="902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570"
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.115856 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570"} err="failed to get container status \"902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570\": rpc error: code = NotFound desc = could not find container \"902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570\": container with ID starting with 902218d274a938c6c6bb5213b835e9706dd30aa78c42d5ef6b71beaa5ca29570 not found: ID does not exist"
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.119432 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.130808 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.141112 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddcx2\" (UniqueName: \"kubernetes.io/projected/bbf31bb3-2ca0-4223-9b65-fb4081267ea9-kube-api-access-ddcx2\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.144008 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 13 21:29:31 crc kubenswrapper[4689]: E1013 21:29:31.144620 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbf31bb3-2ca0-4223-9b65-fb4081267ea9" containerName="kube-state-metrics"
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.144732 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbf31bb3-2ca0-4223-9b65-fb4081267ea9" containerName="kube-state-metrics"
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.144979 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbf31bb3-2ca0-4223-9b65-fb4081267ea9" containerName="kube-state-metrics"
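[editor's note] The ContainerStatus/DeleteContainer NotFound errors above are benign: the container was already gone by the time the deletor looked it up, and the kubelet treats NotFound as success. A minimal sketch of that idempotent-removal pattern with gRPC status codes; `removeContainer` is a hypothetical stand-in for the CRI call, not a real API:

```go
// Sketch: tolerate "already deleted" when removing a container, the way the
// kubelet does when it races a CRI runtime garbage-collecting the container.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer stands in for a CRI RemoveContainer RPC; here it always
// reports NotFound, like the runtime did for container 902218d2... above.
func removeContainer(id string) error {
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func removeIgnoringNotFound(id string) error {
	if err := removeContainer(id); err != nil {
		if status.Code(err) == codes.NotFound {
			return nil // already gone: treat as success
		}
		return err
	}
	return nil
}

func main() {
	fmt.Println(removeIgnoringNotFound("902218d274a9")) // prints <nil>
}
```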
podUID="bbf31bb3-2ca0-4223-9b65-fb4081267ea9" containerName="kube-state-metrics" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.145663 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.149271 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.150388 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.199069 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.242186 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.242431 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8rkw\" (UniqueName: \"kubernetes.io/projected/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-api-access-p8rkw\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.242605 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.242737 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.345058 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.346052 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.346154 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8rkw\" (UniqueName: \"kubernetes.io/projected/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-api-access-p8rkw\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " 
pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.346283 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.349542 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.351435 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.361323 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8rkw\" (UniqueName: \"kubernetes.io/projected/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-api-access-p8rkw\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.371088 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/0c2195ac-449d-47d0-9a1b-b512a0c6b44e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"0c2195ac-449d-47d0-9a1b-b512a0c6b44e\") " pod="openstack/kube-state-metrics-0" Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.481706 4689 util.go:30] "No sandbox for pod can be found. 
Oct 13 21:29:31 crc kubenswrapper[4689]: I1013 21:29:31.878127 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbf31bb3-2ca0-4223-9b65-fb4081267ea9" path="/var/lib/kubelet/pods/bbf31bb3-2ca0-4223-9b65-fb4081267ea9/volumes"
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.016452 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 13 21:29:32 crc kubenswrapper[4689]: W1013 21:29:32.024183 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c2195ac_449d_47d0_9a1b_b512a0c6b44e.slice/crio-f48e2f7c6da57265cf29e860391f3e3a990366c6906ac31131be5260308eeaf6 WatchSource:0}: Error finding container f48e2f7c6da57265cf29e860391f3e3a990366c6906ac31131be5260308eeaf6: Status 404 returned error can't find the container with id f48e2f7c6da57265cf29e860391f3e3a990366c6906ac31131be5260308eeaf6
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.070354 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0c2195ac-449d-47d0-9a1b-b512a0c6b44e","Type":"ContainerStarted","Data":"f48e2f7c6da57265cf29e860391f3e3a990366c6906ac31131be5260308eeaf6"}
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.094359 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.094672 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-central-agent" containerID="cri-o://f28ca62d92901de8d9af61c5eaafce83b62a3c82837fdae01b7acdb9feedd249" gracePeriod=30
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.094732 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="proxy-httpd" containerID="cri-o://3a1ba0b926a1ff2b613f5207f9a23c0897c7452f92c897102c42881acc308de5" gracePeriod=30
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.094810 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-notification-agent" containerID="cri-o://5ee776dda9f8a7ce96d07013693695d95da474a740a1eff403e7428993679417" gracePeriod=30
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.094912 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="sg-core" containerID="cri-o://45872f5f9957ff921395d4ab988b88c19d90a7e32db53069f3f69cea0835799d" gracePeriod=30
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.716914 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 13 21:29:32 crc kubenswrapper[4689]: I1013 21:29:32.716965 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.079940 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"0c2195ac-449d-47d0-9a1b-b512a0c6b44e","Type":"ContainerStarted","Data":"5dcfaabb59ca7b47bc95574530b0fcd26d854199f405ed840f91407997595c84"}
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.080276 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
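[editor's note] gracePeriod=30 in the "Killing container with a grace period" entries above is the pod's termination grace period, propagated through the API delete. A sketch of issuing such a delete with client-go, assuming kubeconfig credentials; only the namespace and pod name are taken from the log:

```go
// Sketch: delete openstack/ceilometer-0 with a 30s grace period. Containers
// receive SIGTERM immediately and SIGKILL if still running after 30s, which
// is what the kuberuntime_container.go lines above are carrying out.
package main

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	grace := int64(30)
	if err := cs.CoreV1().Pods("openstack").Delete(context.TODO(), "ceilometer-0",
		metav1.DeleteOptions{GracePeriodSeconds: &grace}); err != nil {
		panic(err)
	}
}
```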
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.083442 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerDied","Data":"3a1ba0b926a1ff2b613f5207f9a23c0897c7452f92c897102c42881acc308de5"}
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.083444 4689 generic.go:334] "Generic (PLEG): container finished" podID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerID="3a1ba0b926a1ff2b613f5207f9a23c0897c7452f92c897102c42881acc308de5" exitCode=0
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.083489 4689 generic.go:334] "Generic (PLEG): container finished" podID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerID="45872f5f9957ff921395d4ab988b88c19d90a7e32db53069f3f69cea0835799d" exitCode=2
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.083501 4689 generic.go:334] "Generic (PLEG): container finished" podID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerID="f28ca62d92901de8d9af61c5eaafce83b62a3c82837fdae01b7acdb9feedd249" exitCode=0
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.083515 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerDied","Data":"45872f5f9957ff921395d4ab988b88c19d90a7e32db53069f3f69cea0835799d"}
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.083525 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerDied","Data":"f28ca62d92901de8d9af61c5eaafce83b62a3c82837fdae01b7acdb9feedd249"}
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.103179 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.6801684890000002 podStartE2EDuration="2.103158699s" podCreationTimestamp="2025-10-13 21:29:31 +0000 UTC" firstStartedPulling="2025-10-13 21:29:32.026670605 +0000 UTC m=+1088.944915690" lastFinishedPulling="2025-10-13 21:29:32.449660815 +0000 UTC m=+1089.367905900" observedRunningTime="2025-10-13 21:29:33.092890367 +0000 UTC m=+1090.011135452" watchObservedRunningTime="2025-10-13 21:29:33.103158699 +0000 UTC m=+1090.021403784"
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.798761 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 13 21:29:33 crc kubenswrapper[4689]: I1013 21:29:33.798850 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.128471 4689 generic.go:334] "Generic (PLEG): container finished" podID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerID="5ee776dda9f8a7ce96d07013693695d95da474a740a1eff403e7428993679417" exitCode=0
Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.128705 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerDied","Data":"5ee776dda9f8a7ce96d07013693695d95da474a740a1eff403e7428993679417"}
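[editor's note] podStartSLOduration in the entry above is end-to-end startup time minus time spent pulling images. A sketch reconstructing the kube-state-metrics-0 numbers from the timestamps printed in that entry (plain time arithmetic; the formula is inferred from the logged values, which it reproduces exactly):

```go
// Sketch: e2e = watchObservedRunningTime - podCreationTimestamp;
// slo = e2e - (lastFinishedPulling - firstStartedPulling).
// Prints e2e=2.103158699s slo=1.680168489s, matching the log.
package main

import (
	"fmt"
	"time"
)

func main() {
	created, _ := time.Parse(time.RFC3339, "2025-10-13T21:29:31Z")
	firstPull, _ := time.Parse(time.RFC3339Nano, "2025-10-13T21:29:32.026670605Z")
	lastPull, _ := time.Parse(time.RFC3339Nano, "2025-10-13T21:29:32.449660815Z")
	observed, _ := time.Parse(time.RFC3339Nano, "2025-10-13T21:29:33.103158699Z")

	e2e := observed.Sub(created)         // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration excludes pull time
	fmt.Printf("e2e=%v slo=%v\n", e2e, slo)
}
```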
event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerDied","Data":"5ee776dda9f8a7ce96d07013693695d95da474a740a1eff403e7428993679417"} Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.254148 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.358785 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-log-httpd\") pod \"4dae00fa-4534-4058-b6c1-d416074d2f7d\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.358851 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-combined-ca-bundle\") pod \"4dae00fa-4534-4058-b6c1-d416074d2f7d\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.358931 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-run-httpd\") pod \"4dae00fa-4534-4058-b6c1-d416074d2f7d\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.358975 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-sg-core-conf-yaml\") pod \"4dae00fa-4534-4058-b6c1-d416074d2f7d\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.359002 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-scripts\") pod \"4dae00fa-4534-4058-b6c1-d416074d2f7d\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.359088 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zc82k\" (UniqueName: \"kubernetes.io/projected/4dae00fa-4534-4058-b6c1-d416074d2f7d-kube-api-access-zc82k\") pod \"4dae00fa-4534-4058-b6c1-d416074d2f7d\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.359148 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-config-data\") pod \"4dae00fa-4534-4058-b6c1-d416074d2f7d\" (UID: \"4dae00fa-4534-4058-b6c1-d416074d2f7d\") " Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.359374 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4dae00fa-4534-4058-b6c1-d416074d2f7d" (UID: "4dae00fa-4534-4058-b6c1-d416074d2f7d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.359806 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4dae00fa-4534-4058-b6c1-d416074d2f7d" (UID: "4dae00fa-4534-4058-b6c1-d416074d2f7d"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.359894 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.364943 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dae00fa-4534-4058-b6c1-d416074d2f7d-kube-api-access-zc82k" (OuterVolumeSpecName: "kube-api-access-zc82k") pod "4dae00fa-4534-4058-b6c1-d416074d2f7d" (UID: "4dae00fa-4534-4058-b6c1-d416074d2f7d"). InnerVolumeSpecName "kube-api-access-zc82k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.377497 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-scripts" (OuterVolumeSpecName: "scripts") pod "4dae00fa-4534-4058-b6c1-d416074d2f7d" (UID: "4dae00fa-4534-4058-b6c1-d416074d2f7d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.391347 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4dae00fa-4534-4058-b6c1-d416074d2f7d" (UID: "4dae00fa-4534-4058-b6c1-d416074d2f7d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.461754 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dae00fa-4534-4058-b6c1-d416074d2f7d-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.461786 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.461798 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.461809 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zc82k\" (UniqueName: \"kubernetes.io/projected/4dae00fa-4534-4058-b6c1-d416074d2f7d-kube-api-access-zc82k\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.479965 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4dae00fa-4534-4058-b6c1-d416074d2f7d" (UID: "4dae00fa-4534-4058-b6c1-d416074d2f7d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.484830 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-config-data" (OuterVolumeSpecName: "config-data") pod "4dae00fa-4534-4058-b6c1-d416074d2f7d" (UID: "4dae00fa-4534-4058-b6c1-d416074d2f7d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.563895 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.563929 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dae00fa-4534-4058-b6c1-d416074d2f7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.602561 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.607142 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 13 21:29:37 crc kubenswrapper[4689]: I1013 21:29:37.629654 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.147232 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4dae00fa-4534-4058-b6c1-d416074d2f7d","Type":"ContainerDied","Data":"cf6e5d925a0de9c1d7712e84f9bd80736b3b80c607148e8db491bbfa40d51e4f"} Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.147291 4689 scope.go:117] "RemoveContainer" containerID="3a1ba0b926a1ff2b613f5207f9a23c0897c7452f92c897102c42881acc308de5" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.147327 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.155047 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.193809 4689 scope.go:117] "RemoveContainer" containerID="45872f5f9957ff921395d4ab988b88c19d90a7e32db53069f3f69cea0835799d" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.198946 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.211854 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.222554 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:38 crc kubenswrapper[4689]: E1013 21:29:38.223484 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-central-agent" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.223509 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-central-agent" Oct 13 21:29:38 crc kubenswrapper[4689]: E1013 21:29:38.223559 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-notification-agent" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.223568 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-notification-agent" Oct 13 21:29:38 crc kubenswrapper[4689]: E1013 21:29:38.223631 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" 
containerName="sg-core" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.223642 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="sg-core" Oct 13 21:29:38 crc kubenswrapper[4689]: E1013 21:29:38.223694 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="proxy-httpd" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.223704 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="proxy-httpd" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.224046 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="proxy-httpd" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.224102 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-central-agent" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.224142 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="ceilometer-notification-agent" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.224193 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" containerName="sg-core" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.230024 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.238367 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.238863 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.239854 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.241985 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.244041 4689 scope.go:117] "RemoveContainer" containerID="5ee776dda9f8a7ce96d07013693695d95da474a740a1eff403e7428993679417" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.281654 4689 scope.go:117] "RemoveContainer" containerID="f28ca62d92901de8d9af61c5eaafce83b62a3c82837fdae01b7acdb9feedd249" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.390507 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-run-httpd\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.390575 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-scripts\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.390652 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-log-httpd\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.390852 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.390910 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.390979 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxwsz\" (UniqueName: \"kubernetes.io/projected/708ab341-c93c-413b-b656-d416c67a0f32-kube-api-access-qxwsz\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.391095 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-config-data\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.391269 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.493493 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-config-data\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494406 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494459 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-run-httpd\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494478 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-scripts\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494523 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-log-httpd\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494615 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494649 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494690 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxwsz\" (UniqueName: \"kubernetes.io/projected/708ab341-c93c-413b-b656-d416c67a0f32-kube-api-access-qxwsz\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.494978 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-run-httpd\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.495397 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-log-httpd\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.500093 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.511505 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.511969 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-scripts\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.512428 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.521921 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-config-data\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.531620 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxwsz\" (UniqueName: \"kubernetes.io/projected/708ab341-c93c-413b-b656-d416c67a0f32-kube-api-access-qxwsz\") pod \"ceilometer-0\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " pod="openstack/ceilometer-0" Oct 13 21:29:38 crc kubenswrapper[4689]: I1013 21:29:38.562177 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:39 crc kubenswrapper[4689]: I1013 21:29:39.013488 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:39 crc kubenswrapper[4689]: I1013 21:29:39.166170 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerStarted","Data":"418a89778b4c3309386c0e90c540485ef08c07627076f431f0e67f1d61e813c7"} Oct 13 21:29:39 crc kubenswrapper[4689]: I1013 21:29:39.909506 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dae00fa-4534-4058-b6c1-d416074d2f7d" path="/var/lib/kubelet/pods/4dae00fa-4534-4058-b6c1-d416074d2f7d/volumes" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.034983 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.134764 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-config-data\") pod \"762ff29c-7685-452a-bd06-561632aed1d9\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.134842 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-combined-ca-bundle\") pod \"762ff29c-7685-452a-bd06-561632aed1d9\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.134930 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cql5h\" (UniqueName: \"kubernetes.io/projected/762ff29c-7685-452a-bd06-561632aed1d9-kube-api-access-cql5h\") pod \"762ff29c-7685-452a-bd06-561632aed1d9\" (UID: \"762ff29c-7685-452a-bd06-561632aed1d9\") " Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.145545 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/762ff29c-7685-452a-bd06-561632aed1d9-kube-api-access-cql5h" (OuterVolumeSpecName: "kube-api-access-cql5h") pod "762ff29c-7685-452a-bd06-561632aed1d9" (UID: "762ff29c-7685-452a-bd06-561632aed1d9"). InnerVolumeSpecName "kube-api-access-cql5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.169239 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-config-data" (OuterVolumeSpecName: "config-data") pod "762ff29c-7685-452a-bd06-561632aed1d9" (UID: "762ff29c-7685-452a-bd06-561632aed1d9"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.169856 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "762ff29c-7685-452a-bd06-561632aed1d9" (UID: "762ff29c-7685-452a-bd06-561632aed1d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.187166 4689 generic.go:334] "Generic (PLEG): container finished" podID="762ff29c-7685-452a-bd06-561632aed1d9" containerID="f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5" exitCode=137 Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.187334 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"762ff29c-7685-452a-bd06-561632aed1d9","Type":"ContainerDied","Data":"f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5"} Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.187386 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"762ff29c-7685-452a-bd06-561632aed1d9","Type":"ContainerDied","Data":"ab65a88a21ab41915d9195e8d168fb13a5e8e89b0ba8f7a1b02233edd03e9d3a"} Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.187411 4689 scope.go:117] "RemoveContainer" containerID="f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.187571 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.199530 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerStarted","Data":"967c875c102973540b60ba251b23a16873b229b778c0ff923d74c5818bf6f541"} Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.247832 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.247873 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/762ff29c-7685-452a-bd06-561632aed1d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.247887 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cql5h\" (UniqueName: \"kubernetes.io/projected/762ff29c-7685-452a-bd06-561632aed1d9-kube-api-access-cql5h\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.249653 4689 scope.go:117] "RemoveContainer" containerID="f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5" Oct 13 21:29:40 crc kubenswrapper[4689]: E1013 21:29:40.250232 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5\": container with ID starting with f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5 not found: ID does not exist" containerID="f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5" Oct 13 21:29:40 crc 
kubenswrapper[4689]: I1013 21:29:40.250279 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5"} err="failed to get container status \"f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5\": rpc error: code = NotFound desc = could not find container \"f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5\": container with ID starting with f92ec5d0fda0ab92140b6a131dcf9eb9afc315fd8345995242f1cd9568d908d5 not found: ID does not exist" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.257130 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.279963 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.290572 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 13 21:29:40 crc kubenswrapper[4689]: E1013 21:29:40.291283 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="762ff29c-7685-452a-bd06-561632aed1d9" containerName="nova-cell1-novncproxy-novncproxy" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.291314 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="762ff29c-7685-452a-bd06-561632aed1d9" containerName="nova-cell1-novncproxy-novncproxy" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.291609 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="762ff29c-7685-452a-bd06-561632aed1d9" containerName="nova-cell1-novncproxy-novncproxy" Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.293444 4689 util.go:30] "No sandbox for pod can be found. 
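[editor's note] exitCode=137 above decodes as 128+9: the nova-cell1-novncproxy container was killed by SIGKILL after its grace period, unlike the exitCode=0 and exitCode=2 terminations earlier, which were ordinary process exits. A small sketch of the convention:

```go
// Sketch: decode container exit codes; values above 128 mean the process
// died to signal (code - 128), so 137 = 128 + 9 = SIGKILL.
package main

import (
	"fmt"
	"syscall"
)

func describeExit(code int) string {
	if code > 128 {
		sig := syscall.Signal(code - 128)
		return fmt.Sprintf("killed by signal %d (%v)", code-128, sig)
	}
	return fmt.Sprintf("exited normally with status %d", code)
}

func main() {
	for _, c := range []int{0, 2, 137} {
		fmt.Printf("exitCode=%d: %s\n", c, describeExit(c))
	}
}
```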
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.298962 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.299571 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.299752 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.320419 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.451619 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.451687 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.451750 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.451797 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.451833 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-242qh\" (UniqueName: \"kubernetes.io/projected/7b0b0a0a-623c-42db-abb2-50a50d924793-kube-api-access-242qh\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.553359 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.553410 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.553484 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.553547 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.553615 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-242qh\" (UniqueName: \"kubernetes.io/projected/7b0b0a0a-623c-42db-abb2-50a50d924793-kube-api-access-242qh\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.558653 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.559097 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.559213 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.559315 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b0b0a0a-623c-42db-abb2-50a50d924793-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.569647 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-242qh\" (UniqueName: \"kubernetes.io/projected/7b0b0a0a-623c-42db-abb2-50a50d924793-kube-api-access-242qh\") pod \"nova-cell1-novncproxy-0\" (UID: \"7b0b0a0a-623c-42db-abb2-50a50d924793\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:40 crc kubenswrapper[4689]: I1013 21:29:40.686448 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:41 crc kubenswrapper[4689]: I1013 21:29:41.139137 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 13 21:29:41 crc kubenswrapper[4689]: I1013 21:29:41.212895 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7b0b0a0a-623c-42db-abb2-50a50d924793","Type":"ContainerStarted","Data":"2b372d73921b7a70dd75a5a3c445232bff083b0a2e97cc29c189bb744b74c069"}
Oct 13 21:29:41 crc kubenswrapper[4689]: I1013 21:29:41.215136 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerStarted","Data":"b04311f04981efa4004208be3ff279e1dc95beaac46d791383181552e3d4d38e"}
Oct 13 21:29:41 crc kubenswrapper[4689]: I1013 21:29:41.494152 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 13 21:29:41 crc kubenswrapper[4689]: I1013 21:29:41.883204 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="762ff29c-7685-452a-bd06-561632aed1d9" path="/var/lib/kubelet/pods/762ff29c-7685-452a-bd06-561632aed1d9/volumes"
Oct 13 21:29:42 crc kubenswrapper[4689]: I1013 21:29:42.231866 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"7b0b0a0a-623c-42db-abb2-50a50d924793","Type":"ContainerStarted","Data":"1ed76aac746f8c6c264722e6f650168f7137403b4946850782033489bc448b17"}
Oct 13 21:29:42 crc kubenswrapper[4689]: I1013 21:29:42.237148 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerStarted","Data":"a318cf7384899552208c79158b4f84d9e990cc043d087b5e49deeff390d0f1b0"}
Oct 13 21:29:42 crc kubenswrapper[4689]: I1013 21:29:42.268869 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.268845205 podStartE2EDuration="2.268845205s" podCreationTimestamp="2025-10-13 21:29:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:42.259029743 +0000 UTC m=+1099.177274828" watchObservedRunningTime="2025-10-13 21:29:42.268845205 +0000 UTC m=+1099.187090290"
Oct 13 21:29:42 crc kubenswrapper[4689]: I1013 21:29:42.724082 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 13 21:29:42 crc kubenswrapper[4689]: I1013 21:29:42.724971 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 13 21:29:42 crc kubenswrapper[4689]: I1013 21:29:42.728480 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 13 21:29:42 crc kubenswrapper[4689]: I1013 21:29:42.730519 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.244643 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.247348 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.466650 4689 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-89c5cd4d5-gkk99"] Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.468453 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.490774 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-gkk99"] Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.626976 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.627293 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.627494 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-684dh\" (UniqueName: \"kubernetes.io/projected/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-kube-api-access-684dh\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.627612 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.627644 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-config\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.627692 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.729207 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.729375 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " 
pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.729424 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-684dh\" (UniqueName: \"kubernetes.io/projected/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-kube-api-access-684dh\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.729507 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.729535 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-config\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.729568 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.730451 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.730516 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.730546 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.730571 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.730823 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-config\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.748494 4689 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-684dh\" (UniqueName: \"kubernetes.io/projected/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-kube-api-access-684dh\") pod \"dnsmasq-dns-89c5cd4d5-gkk99\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") " pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:43 crc kubenswrapper[4689]: I1013 21:29:43.789048 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:44 crc kubenswrapper[4689]: I1013 21:29:44.255652 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerStarted","Data":"51c220e179106b9ab7d0229078d68504b7c655b4fb2d8545b321dcde518f02ef"} Oct 13 21:29:44 crc kubenswrapper[4689]: I1013 21:29:44.256122 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 13 21:29:44 crc kubenswrapper[4689]: I1013 21:29:44.304598 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.280535436 podStartE2EDuration="6.304546652s" podCreationTimestamp="2025-10-13 21:29:38 +0000 UTC" firstStartedPulling="2025-10-13 21:29:39.037441668 +0000 UTC m=+1095.955686753" lastFinishedPulling="2025-10-13 21:29:43.061452884 +0000 UTC m=+1099.979697969" observedRunningTime="2025-10-13 21:29:44.295440687 +0000 UTC m=+1101.213685772" watchObservedRunningTime="2025-10-13 21:29:44.304546652 +0000 UTC m=+1101.222791737" Oct 13 21:29:44 crc kubenswrapper[4689]: I1013 21:29:44.340664 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-gkk99"] Oct 13 21:29:44 crc kubenswrapper[4689]: W1013 21:29:44.345895 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c640abb_53ea_4a06_80b4_5e3d8df8f0fa.slice/crio-329d1acaa977109e76eea07f0c7ebd3c713341bad522401a4e6e547efe615c9e WatchSource:0}: Error finding container 329d1acaa977109e76eea07f0c7ebd3c713341bad522401a4e6e547efe615c9e: Status 404 returned error can't find the container with id 329d1acaa977109e76eea07f0c7ebd3c713341bad522401a4e6e547efe615c9e Oct 13 21:29:45 crc kubenswrapper[4689]: I1013 21:29:45.264876 4689 generic.go:334] "Generic (PLEG): container finished" podID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerID="2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f" exitCode=0 Oct 13 21:29:45 crc kubenswrapper[4689]: I1013 21:29:45.264963 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" event={"ID":"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa","Type":"ContainerDied","Data":"2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f"} Oct 13 21:29:45 crc kubenswrapper[4689]: I1013 21:29:45.265380 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" event={"ID":"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa","Type":"ContainerStarted","Data":"329d1acaa977109e76eea07f0c7ebd3c713341bad522401a4e6e547efe615c9e"} Oct 13 21:29:45 crc kubenswrapper[4689]: I1013 21:29:45.687543 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.006287 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.292214 4689 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-log" containerID="cri-o://73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e" gracePeriod=30 Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.293345 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" event={"ID":"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa","Type":"ContainerStarted","Data":"4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb"} Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.293380 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.293662 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-api" containerID="cri-o://55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3" gracePeriod=30 Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.328142 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" podStartSLOduration=3.328118902 podStartE2EDuration="3.328118902s" podCreationTimestamp="2025-10-13 21:29:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:46.319898898 +0000 UTC m=+1103.238143983" watchObservedRunningTime="2025-10-13 21:29:46.328118902 +0000 UTC m=+1103.246363987" Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.850612 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.851099 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-central-agent" containerID="cri-o://967c875c102973540b60ba251b23a16873b229b778c0ff923d74c5818bf6f541" gracePeriod=30 Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.851199 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="proxy-httpd" containerID="cri-o://51c220e179106b9ab7d0229078d68504b7c655b4fb2d8545b321dcde518f02ef" gracePeriod=30 Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.851295 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="sg-core" containerID="cri-o://a318cf7384899552208c79158b4f84d9e990cc043d087b5e49deeff390d0f1b0" gracePeriod=30 Oct 13 21:29:46 crc kubenswrapper[4689]: I1013 21:29:46.851325 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-notification-agent" containerID="cri-o://b04311f04981efa4004208be3ff279e1dc95beaac46d791383181552e3d4d38e" gracePeriod=30 Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332165 4689 generic.go:334] "Generic (PLEG): container finished" podID="708ab341-c93c-413b-b656-d416c67a0f32" containerID="51c220e179106b9ab7d0229078d68504b7c655b4fb2d8545b321dcde518f02ef" exitCode=0 Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332197 4689 
generic.go:334] "Generic (PLEG): container finished" podID="708ab341-c93c-413b-b656-d416c67a0f32" containerID="a318cf7384899552208c79158b4f84d9e990cc043d087b5e49deeff390d0f1b0" exitCode=2 Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332204 4689 generic.go:334] "Generic (PLEG): container finished" podID="708ab341-c93c-413b-b656-d416c67a0f32" containerID="b04311f04981efa4004208be3ff279e1dc95beaac46d791383181552e3d4d38e" exitCode=0 Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332211 4689 generic.go:334] "Generic (PLEG): container finished" podID="708ab341-c93c-413b-b656-d416c67a0f32" containerID="967c875c102973540b60ba251b23a16873b229b778c0ff923d74c5818bf6f541" exitCode=0 Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332257 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerDied","Data":"51c220e179106b9ab7d0229078d68504b7c655b4fb2d8545b321dcde518f02ef"} Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332312 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerDied","Data":"a318cf7384899552208c79158b4f84d9e990cc043d087b5e49deeff390d0f1b0"} Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332327 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerDied","Data":"b04311f04981efa4004208be3ff279e1dc95beaac46d791383181552e3d4d38e"} Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.332338 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerDied","Data":"967c875c102973540b60ba251b23a16873b229b778c0ff923d74c5818bf6f541"} Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.334514 4689 generic.go:334] "Generic (PLEG): container finished" podID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerID="73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e" exitCode=143 Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.334608 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c","Type":"ContainerDied","Data":"73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e"} Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.663971 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.802940 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-sg-core-conf-yaml\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.802989 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-scripts\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803068 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-run-httpd\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803120 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-ceilometer-tls-certs\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803144 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-log-httpd\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803189 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-config-data\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803252 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-combined-ca-bundle\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803286 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxwsz\" (UniqueName: \"kubernetes.io/projected/708ab341-c93c-413b-b656-d416c67a0f32-kube-api-access-qxwsz\") pod \"708ab341-c93c-413b-b656-d416c67a0f32\" (UID: \"708ab341-c93c-413b-b656-d416c67a0f32\") " Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803576 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.803807 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.804362 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.804385 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708ab341-c93c-413b-b656-d416c67a0f32-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.809122 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-scripts" (OuterVolumeSpecName: "scripts") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.815853 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/708ab341-c93c-413b-b656-d416c67a0f32-kube-api-access-qxwsz" (OuterVolumeSpecName: "kube-api-access-qxwsz") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "kube-api-access-qxwsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.846691 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.863719 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.886054 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.905765 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.905795 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.905807 4689 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.905817 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.905825 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxwsz\" (UniqueName: \"kubernetes.io/projected/708ab341-c93c-413b-b656-d416c67a0f32-kube-api-access-qxwsz\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:47 crc kubenswrapper[4689]: I1013 21:29:47.910263 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-config-data" (OuterVolumeSpecName: "config-data") pod "708ab341-c93c-413b-b656-d416c67a0f32" (UID: "708ab341-c93c-413b-b656-d416c67a0f32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.007655 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708ab341-c93c-413b-b656-d416c67a0f32-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.346516 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"708ab341-c93c-413b-b656-d416c67a0f32","Type":"ContainerDied","Data":"418a89778b4c3309386c0e90c540485ef08c07627076f431f0e67f1d61e813c7"} Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.346580 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.346805 4689 scope.go:117] "RemoveContainer" containerID="51c220e179106b9ab7d0229078d68504b7c655b4fb2d8545b321dcde518f02ef" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.368715 4689 scope.go:117] "RemoveContainer" containerID="a318cf7384899552208c79158b4f84d9e990cc043d087b5e49deeff390d0f1b0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.385889 4689 scope.go:117] "RemoveContainer" containerID="b04311f04981efa4004208be3ff279e1dc95beaac46d791383181552e3d4d38e" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.402510 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.412226 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.415478 4689 scope.go:117] "RemoveContainer" containerID="967c875c102973540b60ba251b23a16873b229b778c0ff923d74c5818bf6f541" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435022 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:48 crc kubenswrapper[4689]: E1013 21:29:48.435438 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="sg-core" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435458 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="sg-core" Oct 13 21:29:48 crc kubenswrapper[4689]: E1013 21:29:48.435477 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="proxy-httpd" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435485 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="proxy-httpd" Oct 13 21:29:48 crc kubenswrapper[4689]: E1013 21:29:48.435498 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-central-agent" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435506 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-central-agent" Oct 13 21:29:48 crc kubenswrapper[4689]: E1013 21:29:48.435517 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-notification-agent" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435524 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-notification-agent" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435762 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="sg-core" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435793 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-central-agent" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435811 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="proxy-httpd" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.435822 4689 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="708ab341-c93c-413b-b656-d416c67a0f32" containerName="ceilometer-notification-agent" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.437869 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.440148 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.440287 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.440369 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.455732 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.529921 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:48 crc kubenswrapper[4689]: E1013 21:29:48.530887 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-w62wl log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-w62wl log-httpd run-httpd scripts sg-core-conf-yaml]: context canceled" pod="openstack/ceilometer-0" podUID="8217459e-fc4e-4bfa-9696-331cf1d999f1" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620130 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620387 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w62wl\" (UniqueName: \"kubernetes.io/projected/8217459e-fc4e-4bfa-9696-331cf1d999f1-kube-api-access-w62wl\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620492 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620624 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-config-data\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620747 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620851 
4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-run-httpd\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620891 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-log-httpd\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.620930 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-scripts\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.722235 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.722300 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-run-httpd\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.722334 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-log-httpd\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.722374 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-scripts\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.722407 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.722461 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w62wl\" (UniqueName: \"kubernetes.io/projected/8217459e-fc4e-4bfa-9696-331cf1d999f1-kube-api-access-w62wl\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.722483 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc 
kubenswrapper[4689]: I1013 21:29:48.722513 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-config-data\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.723462 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-run-httpd\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.723801 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-log-httpd\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.726987 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.728282 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-config-data\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.729315 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.729452 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-scripts\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.736010 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:48 crc kubenswrapper[4689]: I1013 21:29:48.756486 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w62wl\" (UniqueName: \"kubernetes.io/projected/8217459e-fc4e-4bfa-9696-331cf1d999f1-kube-api-access-w62wl\") pod \"ceilometer-0\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " pod="openstack/ceilometer-0" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.358121 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.370457 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.537108 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-ceilometer-tls-certs\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.537406 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-scripts\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.537554 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-combined-ca-bundle\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.537673 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-sg-core-conf-yaml\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.537845 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-config-data\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.537940 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w62wl\" (UniqueName: \"kubernetes.io/projected/8217459e-fc4e-4bfa-9696-331cf1d999f1-kube-api-access-w62wl\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.538080 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-log-httpd\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.538207 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-run-httpd\") pod \"8217459e-fc4e-4bfa-9696-331cf1d999f1\" (UID: \"8217459e-fc4e-4bfa-9696-331cf1d999f1\") " Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.538708 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.538991 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.543447 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.544756 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-scripts" (OuterVolumeSpecName: "scripts") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.544812 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.544866 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.544721 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-config-data" (OuterVolumeSpecName: "config-data") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.548641 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8217459e-fc4e-4bfa-9696-331cf1d999f1-kube-api-access-w62wl" (OuterVolumeSpecName: "kube-api-access-w62wl") pod "8217459e-fc4e-4bfa-9696-331cf1d999f1" (UID: "8217459e-fc4e-4bfa-9696-331cf1d999f1"). InnerVolumeSpecName "kube-api-access-w62wl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.640851 4689 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.640999 4689 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.641067 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-scripts\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.641132 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.641204 4689 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.641262 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217459e-fc4e-4bfa-9696-331cf1d999f1-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.641327 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w62wl\" (UniqueName: \"kubernetes.io/projected/8217459e-fc4e-4bfa-9696-331cf1d999f1-kube-api-access-w62wl\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.641388 4689 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217459e-fc4e-4bfa-9696-331cf1d999f1-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.877733 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:49 crc kubenswrapper[4689]: I1013 21:29:49.880293 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="708ab341-c93c-413b-b656-d416c67a0f32" path="/var/lib/kubelet/pods/708ab341-c93c-413b-b656-d416c67a0f32/volumes" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.048694 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-combined-ca-bundle\") pod \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.048792 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-logs\") pod \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.048840 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2cvv\" (UniqueName: \"kubernetes.io/projected/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-kube-api-access-r2cvv\") pod \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.048880 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-config-data\") pod \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\" (UID: \"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c\") " Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.050266 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-logs" (OuterVolumeSpecName: "logs") pod "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" (UID: "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.053772 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-kube-api-access-r2cvv" (OuterVolumeSpecName: "kube-api-access-r2cvv") pod "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" (UID: "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c"). InnerVolumeSpecName "kube-api-access-r2cvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.075619 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" (UID: "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.082831 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-config-data" (OuterVolumeSpecName: "config-data") pod "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" (UID: "3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.151536 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.151570 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.151598 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2cvv\" (UniqueName: \"kubernetes.io/projected/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-kube-api-access-r2cvv\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.151610 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.371531 4689 generic.go:334] "Generic (PLEG): container finished" podID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerID="55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3" exitCode=0 Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.371599 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c","Type":"ContainerDied","Data":"55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3"} Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.372084 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.372117 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c","Type":"ContainerDied","Data":"25b8106ec24acabe9621ab351615f57d455a9307e7dd96f54c5b3219bf8146de"} Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.372158 4689 scope.go:117] "RemoveContainer" containerID="55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.371652 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.402113 4689 scope.go:117] "RemoveContainer" containerID="73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.449549 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.509252 4689 scope.go:117] "RemoveContainer" containerID="55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3" Oct 13 21:29:50 crc kubenswrapper[4689]: E1013 21:29:50.523023 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3\": container with ID starting with 55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3 not found: ID does not exist" containerID="55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.523069 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3"} err="failed to get container status \"55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3\": rpc error: code = NotFound desc = could not find container \"55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3\": container with ID starting with 55f118cbe2378daf0f7ced17724d68367c19df26cd8326e448ac9770bdd10ea3 not found: ID does not exist" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.523110 4689 scope.go:117] "RemoveContainer" containerID="73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.523429 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: E1013 21:29:50.526885 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e\": container with ID starting with 73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e not found: ID does not exist" containerID="73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.526935 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e"} err="failed to get container status \"73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e\": rpc error: code = NotFound desc = could not find container \"73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e\": container with ID starting with 73e9b3c8fcd1166236dee9a4d63657a3e3fd8a37372c3b739800321a20ef948e not found: ID does not exist" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.581639 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.610658 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.641314 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: E1013 21:29:50.641732 4689 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-api" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.641747 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-api" Oct 13 21:29:50 crc kubenswrapper[4689]: E1013 21:29:50.641767 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-log" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.641774 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-log" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.642022 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-api" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.642045 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" containerName="nova-api-log" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.643949 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.651266 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.651605 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.651755 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.679651 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.689642 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.698634 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.700295 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.708095 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.708318 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.708431 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.715208 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.727522 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.743660 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-log-httpd\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.743726 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-run-httpd\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.743803 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.743861 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.743886 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-config-data\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.743923 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.743942 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwcfr\" (UniqueName: \"kubernetes.io/projected/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-kube-api-access-qwcfr\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 
21:29:50.743965 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-scripts\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846107 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-scripts\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846162 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846257 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-public-tls-certs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846503 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-log-httpd\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846677 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846776 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-run-httpd\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846828 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f844962-adc2-4345-b031-a5a7a9e003e3-logs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846902 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0" Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.846990 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-config-data\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0" 
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.847043 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8hdg\" (UniqueName: \"kubernetes.io/projected/3f844962-adc2-4345-b031-a5a7a9e003e3-kube-api-access-t8hdg\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.847048 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-run-httpd\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.847116 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-log-httpd\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.847143 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.847307 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-config-data\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.847663 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.847716 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwcfr\" (UniqueName: \"kubernetes.io/projected/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-kube-api-access-qwcfr\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.855254 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-config-data\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.855724 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-scripts\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.856290 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.856538 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.863477 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.868031 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwcfr\" (UniqueName: \"kubernetes.io/projected/08fb7ee0-eee9-49f6-82e2-007abc19bd3b-kube-api-access-qwcfr\") pod \"ceilometer-0\" (UID: \"08fb7ee0-eee9-49f6-82e2-007abc19bd3b\") " pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.949921 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.950052 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-public-tls-certs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.950082 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.950119 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f844962-adc2-4345-b031-a5a7a9e003e3-logs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.950160 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-config-data\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.950182 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8hdg\" (UniqueName: \"kubernetes.io/projected/3f844962-adc2-4345-b031-a5a7a9e003e3-kube-api-access-t8hdg\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.951560 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f844962-adc2-4345-b031-a5a7a9e003e3-logs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.954826 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-config-data\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.955021 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.955032 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.956405 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-public-tls-certs\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.961535 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 13 21:29:50 crc kubenswrapper[4689]: I1013 21:29:50.972973 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8hdg\" (UniqueName: \"kubernetes.io/projected/3f844962-adc2-4345-b031-a5a7a9e003e3-kube-api-access-t8hdg\") pod \"nova-api-0\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " pod="openstack/nova-api-0"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.020556 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.396202 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.491490 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.563372 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.577061 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-p9p9j"]
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.578463 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.584210 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.584394 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.590765 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-p9p9j"]
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.663299 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tnwl\" (UniqueName: \"kubernetes.io/projected/13461759-7aa1-47ea-9bed-2346213bcde6-kube-api-access-5tnwl\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.663369 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-config-data\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.663569 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.663714 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-scripts\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.766689 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.767208 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-scripts\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.767305 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tnwl\" (UniqueName: \"kubernetes.io/projected/13461759-7aa1-47ea-9bed-2346213bcde6-kube-api-access-5tnwl\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.767362 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-config-data\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.770484 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.770523 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-scripts\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.770893 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-config-data\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.790813 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tnwl\" (UniqueName: \"kubernetes.io/projected/13461759-7aa1-47ea-9bed-2346213bcde6-kube-api-access-5tnwl\") pod \"nova-cell1-cell-mapping-p9p9j\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") " pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.878799 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c" path="/var/lib/kubelet/pods/3e14cb22-b4e8-4f72-a3e4-1d7b21a3463c/volumes"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.879504 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8217459e-fc4e-4bfa-9696-331cf1d999f1" path="/var/lib/kubelet/pods/8217459e-fc4e-4bfa-9696-331cf1d999f1/volumes"
Oct 13 21:29:51 crc kubenswrapper[4689]: I1013 21:29:51.901619 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:52 crc kubenswrapper[4689]: I1013 21:29:52.389962 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f844962-adc2-4345-b031-a5a7a9e003e3","Type":"ContainerStarted","Data":"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d"}
Oct 13 21:29:52 crc kubenswrapper[4689]: I1013 21:29:52.390812 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f844962-adc2-4345-b031-a5a7a9e003e3","Type":"ContainerStarted","Data":"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce"}
Oct 13 21:29:52 crc kubenswrapper[4689]: I1013 21:29:52.390928 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f844962-adc2-4345-b031-a5a7a9e003e3","Type":"ContainerStarted","Data":"bdb752e01c94c910b69dc54e5816d63fd312007f7d56ae23c73ea5b9b178dbbb"}
Oct 13 21:29:52 crc kubenswrapper[4689]: I1013 21:29:52.392432 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fb7ee0-eee9-49f6-82e2-007abc19bd3b","Type":"ContainerStarted","Data":"ccbada687e7d3bc80c9fa87136bdc711f45d88bb08b272f37f68062a4e0e5faf"}
Oct 13 21:29:52 crc kubenswrapper[4689]: I1013 21:29:52.392480 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fb7ee0-eee9-49f6-82e2-007abc19bd3b","Type":"ContainerStarted","Data":"c863095291875608dfc203ec256709776589ab5501450728838a421042d56da4"}
Oct 13 21:29:52 crc kubenswrapper[4689]: I1013 21:29:52.409967 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-p9p9j"]
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.403090 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p9p9j" event={"ID":"13461759-7aa1-47ea-9bed-2346213bcde6","Type":"ContainerStarted","Data":"9bef72d4f11edd9532918d10723d2d66ce1e82b755c4a06e11933eff5d56eb1e"}
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.403491 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p9p9j" event={"ID":"13461759-7aa1-47ea-9bed-2346213bcde6","Type":"ContainerStarted","Data":"f7b25b5170f1ac70f7835751547840de7ea05d544660e17dc81e79ca8d38403d"}
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.408091 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fb7ee0-eee9-49f6-82e2-007abc19bd3b","Type":"ContainerStarted","Data":"ef44eb023d42520375a59ecd71210a4b769d37b287fb22702e195b82e5803f1a"}
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.425560 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-p9p9j" podStartSLOduration=2.425539942 podStartE2EDuration="2.425539942s" podCreationTimestamp="2025-10-13 21:29:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:53.424788125 +0000 UTC m=+1110.343033210" watchObservedRunningTime="2025-10-13 21:29:53.425539942 +0000 UTC m=+1110.343785027"
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.429147 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.429137208 podStartE2EDuration="3.429137208s" podCreationTimestamp="2025-10-13 21:29:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:29:52.411158316 +0000 UTC m=+1109.329403401" watchObservedRunningTime="2025-10-13 21:29:53.429137208 +0000 UTC m=+1110.347382293"
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.809746 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99"
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.907503 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-2rg4h"]
Oct 13 21:29:53 crc kubenswrapper[4689]: I1013 21:29:53.907798 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" podUID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerName="dnsmasq-dns" containerID="cri-o://342be3253311bfd48d306c06adf2ce883aa8b9d82a07e52601825bfe9e933b74" gracePeriod=10
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.435077 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fb7ee0-eee9-49f6-82e2-007abc19bd3b","Type":"ContainerStarted","Data":"934fb7ea83ad2b28381a0828cd5fbbaf7ae36076a5edc2480e8223b89a01ae44"}
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.437705 4689 generic.go:334] "Generic (PLEG): container finished" podID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerID="342be3253311bfd48d306c06adf2ce883aa8b9d82a07e52601825bfe9e933b74" exitCode=0
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.438707 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" event={"ID":"1a79fbe5-a692-405a-88f9-4bd7f011ecad","Type":"ContainerDied","Data":"342be3253311bfd48d306c06adf2ce883aa8b9d82a07e52601825bfe9e933b74"}
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.438748 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h" event={"ID":"1a79fbe5-a692-405a-88f9-4bd7f011ecad","Type":"ContainerDied","Data":"21f192a80fffb125339e7127cf728d99f5193479b80c4e01cea0c58b878570f2"}
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.438761 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21f192a80fffb125339e7127cf728d99f5193479b80c4e01cea0c58b878570f2"
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.465016 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h"
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.526520 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-svc\") pod \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") "
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.526619 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52p4w\" (UniqueName: \"kubernetes.io/projected/1a79fbe5-a692-405a-88f9-4bd7f011ecad-kube-api-access-52p4w\") pod \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") "
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.526658 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-nb\") pod \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") "
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.526711 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-config\") pod \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") "
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.526735 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-swift-storage-0\") pod \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") "
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.526768 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-sb\") pod \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\" (UID: \"1a79fbe5-a692-405a-88f9-4bd7f011ecad\") "
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.544205 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a79fbe5-a692-405a-88f9-4bd7f011ecad-kube-api-access-52p4w" (OuterVolumeSpecName: "kube-api-access-52p4w") pod "1a79fbe5-a692-405a-88f9-4bd7f011ecad" (UID: "1a79fbe5-a692-405a-88f9-4bd7f011ecad"). InnerVolumeSpecName "kube-api-access-52p4w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.606262 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1a79fbe5-a692-405a-88f9-4bd7f011ecad" (UID: "1a79fbe5-a692-405a-88f9-4bd7f011ecad"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.607497 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-config" (OuterVolumeSpecName: "config") pod "1a79fbe5-a692-405a-88f9-4bd7f011ecad" (UID: "1a79fbe5-a692-405a-88f9-4bd7f011ecad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.609094 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a79fbe5-a692-405a-88f9-4bd7f011ecad" (UID: "1a79fbe5-a692-405a-88f9-4bd7f011ecad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.624997 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a79fbe5-a692-405a-88f9-4bd7f011ecad" (UID: "1a79fbe5-a692-405a-88f9-4bd7f011ecad"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.627739 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-config\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.627762 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.627774 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.627784 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52p4w\" (UniqueName: \"kubernetes.io/projected/1a79fbe5-a692-405a-88f9-4bd7f011ecad-kube-api-access-52p4w\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.627794 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.636209 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a79fbe5-a692-405a-88f9-4bd7f011ecad" (UID: "1a79fbe5-a692-405a-88f9-4bd7f011ecad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:29:54 crc kubenswrapper[4689]: I1013 21:29:54.730114 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a79fbe5-a692-405a-88f9-4bd7f011ecad-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:55 crc kubenswrapper[4689]: I1013 21:29:55.445991 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-2rg4h"
Oct 13 21:29:55 crc kubenswrapper[4689]: I1013 21:29:55.620053 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-2rg4h"]
Oct 13 21:29:55 crc kubenswrapper[4689]: I1013 21:29:55.631282 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-2rg4h"]
Oct 13 21:29:55 crc kubenswrapper[4689]: I1013 21:29:55.876898 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" path="/var/lib/kubelet/pods/1a79fbe5-a692-405a-88f9-4bd7f011ecad/volumes"
Oct 13 21:29:56 crc kubenswrapper[4689]: I1013 21:29:56.461257 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08fb7ee0-eee9-49f6-82e2-007abc19bd3b","Type":"ContainerStarted","Data":"2b51882396c183ddfd7ca138abb8fb7f5186035a3ddd90c1bbe23dfc939dedeb"}
Oct 13 21:29:56 crc kubenswrapper[4689]: I1013 21:29:56.461703 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 13 21:29:56 crc kubenswrapper[4689]: I1013 21:29:56.492006 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.5521803050000003 podStartE2EDuration="6.491989302s" podCreationTimestamp="2025-10-13 21:29:50 +0000 UTC" firstStartedPulling="2025-10-13 21:29:51.485444403 +0000 UTC m=+1108.403689488" lastFinishedPulling="2025-10-13 21:29:55.4252534 +0000 UTC m=+1112.343498485" observedRunningTime="2025-10-13 21:29:56.487493586 +0000 UTC m=+1113.405738671" watchObservedRunningTime="2025-10-13 21:29:56.491989302 +0000 UTC m=+1113.410234387"
Oct 13 21:29:57 crc kubenswrapper[4689]: I1013 21:29:57.471063 4689 generic.go:334] "Generic (PLEG): container finished" podID="13461759-7aa1-47ea-9bed-2346213bcde6" containerID="9bef72d4f11edd9532918d10723d2d66ce1e82b755c4a06e11933eff5d56eb1e" exitCode=0
Oct 13 21:29:57 crc kubenswrapper[4689]: I1013 21:29:57.471156 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p9p9j" event={"ID":"13461759-7aa1-47ea-9bed-2346213bcde6","Type":"ContainerDied","Data":"9bef72d4f11edd9532918d10723d2d66ce1e82b755c4a06e11933eff5d56eb1e"}
Oct 13 21:29:58 crc kubenswrapper[4689]: I1013 21:29:58.936135 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.113274 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-combined-ca-bundle\") pod \"13461759-7aa1-47ea-9bed-2346213bcde6\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") "
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.113455 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tnwl\" (UniqueName: \"kubernetes.io/projected/13461759-7aa1-47ea-9bed-2346213bcde6-kube-api-access-5tnwl\") pod \"13461759-7aa1-47ea-9bed-2346213bcde6\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") "
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.114343 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-scripts\") pod \"13461759-7aa1-47ea-9bed-2346213bcde6\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") "
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.114457 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-config-data\") pod \"13461759-7aa1-47ea-9bed-2346213bcde6\" (UID: \"13461759-7aa1-47ea-9bed-2346213bcde6\") "
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.119539 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13461759-7aa1-47ea-9bed-2346213bcde6-kube-api-access-5tnwl" (OuterVolumeSpecName: "kube-api-access-5tnwl") pod "13461759-7aa1-47ea-9bed-2346213bcde6" (UID: "13461759-7aa1-47ea-9bed-2346213bcde6"). InnerVolumeSpecName "kube-api-access-5tnwl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.123964 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-scripts" (OuterVolumeSpecName: "scripts") pod "13461759-7aa1-47ea-9bed-2346213bcde6" (UID: "13461759-7aa1-47ea-9bed-2346213bcde6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.146933 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13461759-7aa1-47ea-9bed-2346213bcde6" (UID: "13461759-7aa1-47ea-9bed-2346213bcde6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.164211 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-config-data" (OuterVolumeSpecName: "config-data") pod "13461759-7aa1-47ea-9bed-2346213bcde6" (UID: "13461759-7aa1-47ea-9bed-2346213bcde6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.217329 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tnwl\" (UniqueName: \"kubernetes.io/projected/13461759-7aa1-47ea-9bed-2346213bcde6-kube-api-access-5tnwl\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.217578 4689 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-scripts\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.217716 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.217871 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13461759-7aa1-47ea-9bed-2346213bcde6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.494709 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p9p9j" event={"ID":"13461759-7aa1-47ea-9bed-2346213bcde6","Type":"ContainerDied","Data":"f7b25b5170f1ac70f7835751547840de7ea05d544660e17dc81e79ca8d38403d"}
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.494758 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7b25b5170f1ac70f7835751547840de7ea05d544660e17dc81e79ca8d38403d"
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.494821 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p9p9j"
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.675626 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.676255 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-log" containerID="cri-o://ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce" gracePeriod=30
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.676355 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-api" containerID="cri-o://58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d" gracePeriod=30
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.745388 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.745674 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="67d3cb68-db98-4b94-91d1-bc032bf032bb" containerName="nova-scheduler-scheduler" containerID="cri-o://12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" gracePeriod=30
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.758451 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.758797 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-log" containerID="cri-o://51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6" gracePeriod=30
Oct 13 21:29:59 crc kubenswrapper[4689]: I1013 21:29:59.759107 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-metadata" containerID="cri-o://e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544" gracePeriod=30
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.150999 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"]
Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.153185 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerName="init"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.153214 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerName="init"
Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.153251 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13461759-7aa1-47ea-9bed-2346213bcde6" containerName="nova-manage"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.153263 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="13461759-7aa1-47ea-9bed-2346213bcde6" containerName="nova-manage"
Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.153293 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerName="dnsmasq-dns"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.153303 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerName="dnsmasq-dns"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.153631 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a79fbe5-a692-405a-88f9-4bd7f011ecad" containerName="dnsmasq-dns"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.153658 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="13461759-7aa1-47ea-9bed-2346213bcde6" containerName="nova-manage"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.155954 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.158157 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.158204 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.174929 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"]
Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.318049 4689 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.323431 4689 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.330559 4689 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.330637 4689 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="67d3cb68-db98-4b94-91d1-bc032bf032bb" containerName="nova-scheduler-scheduler"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.336478 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7vn8\" (UniqueName: \"kubernetes.io/projected/30464507-4f21-4110-8238-9698b62b8fe3-kube-api-access-v7vn8\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.336821 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30464507-4f21-4110-8238-9698b62b8fe3-secret-volume\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.337025 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30464507-4f21-4110-8238-9698b62b8fe3-config-volume\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.351212 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.439116 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30464507-4f21-4110-8238-9698b62b8fe3-config-volume\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.439159 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7vn8\" (UniqueName: \"kubernetes.io/projected/30464507-4f21-4110-8238-9698b62b8fe3-kube-api-access-v7vn8\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.439234 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30464507-4f21-4110-8238-9698b62b8fe3-secret-volume\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.440209 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30464507-4f21-4110-8238-9698b62b8fe3-config-volume\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.450309 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30464507-4f21-4110-8238-9698b62b8fe3-secret-volume\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.457053 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7vn8\" (UniqueName: \"kubernetes.io/projected/30464507-4f21-4110-8238-9698b62b8fe3-kube-api-access-v7vn8\") pod \"collect-profiles-29339850-jcjzf\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.498351 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.513391 4689 generic.go:334] "Generic (PLEG): container finished" podID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerID="58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d" exitCode=0
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.513426 4689 generic.go:334] "Generic (PLEG): container finished" podID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerID="ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce" exitCode=143
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.513445 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.513477 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f844962-adc2-4345-b031-a5a7a9e003e3","Type":"ContainerDied","Data":"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d"}
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.513509 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f844962-adc2-4345-b031-a5a7a9e003e3","Type":"ContainerDied","Data":"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce"}
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.513520 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3f844962-adc2-4345-b031-a5a7a9e003e3","Type":"ContainerDied","Data":"bdb752e01c94c910b69dc54e5816d63fd312007f7d56ae23c73ea5b9b178dbbb"}
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.513535 4689 scope.go:117] "RemoveContainer" containerID="58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d"
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.523209 4689 generic.go:334] "Generic (PLEG): container finished" podID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerID="51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6" exitCode=143
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.523271 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"352d4de6-8c1e-462f-a7f6-6c4f2955707d","Type":"ContainerDied","Data":"51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6"}
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.540528 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8hdg\" (UniqueName: \"kubernetes.io/projected/3f844962-adc2-4345-b031-a5a7a9e003e3-kube-api-access-t8hdg\") pod \"3f844962-adc2-4345-b031-a5a7a9e003e3\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") "
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.540672 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f844962-adc2-4345-b031-a5a7a9e003e3-logs\") pod \"3f844962-adc2-4345-b031-a5a7a9e003e3\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") "
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.540714 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-combined-ca-bundle\") pod \"3f844962-adc2-4345-b031-a5a7a9e003e3\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") "
Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.540789 4689
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-config-data\") pod \"3f844962-adc2-4345-b031-a5a7a9e003e3\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.540858 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-public-tls-certs\") pod \"3f844962-adc2-4345-b031-a5a7a9e003e3\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.540879 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-internal-tls-certs\") pod \"3f844962-adc2-4345-b031-a5a7a9e003e3\" (UID: \"3f844962-adc2-4345-b031-a5a7a9e003e3\") " Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.541063 4689 scope.go:117] "RemoveContainer" containerID="ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.544064 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f844962-adc2-4345-b031-a5a7a9e003e3-logs" (OuterVolumeSpecName: "logs") pod "3f844962-adc2-4345-b031-a5a7a9e003e3" (UID: "3f844962-adc2-4345-b031-a5a7a9e003e3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.545789 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f844962-adc2-4345-b031-a5a7a9e003e3-kube-api-access-t8hdg" (OuterVolumeSpecName: "kube-api-access-t8hdg") pod "3f844962-adc2-4345-b031-a5a7a9e003e3" (UID: "3f844962-adc2-4345-b031-a5a7a9e003e3"). InnerVolumeSpecName "kube-api-access-t8hdg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.564784 4689 scope.go:117] "RemoveContainer" containerID="58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d" Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.565285 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d\": container with ID starting with 58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d not found: ID does not exist" containerID="58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.565337 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d"} err="failed to get container status \"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d\": rpc error: code = NotFound desc = could not find container \"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d\": container with ID starting with 58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d not found: ID does not exist" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.565370 4689 scope.go:117] "RemoveContainer" containerID="ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce" Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.565675 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce\": container with ID starting with ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce not found: ID does not exist" containerID="ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.565706 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce"} err="failed to get container status \"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce\": rpc error: code = NotFound desc = could not find container \"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce\": container with ID starting with ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce not found: ID does not exist" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.565726 4689 scope.go:117] "RemoveContainer" containerID="58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.566195 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d"} err="failed to get container status \"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d\": rpc error: code = NotFound desc = could not find container \"58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d\": container with ID starting with 58049dd53d68e793217002e03051e364ad99a270138a2693e605c06e2c44b61d not found: ID does not exist" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.566225 4689 scope.go:117] "RemoveContainer" containerID="ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.566448 4689 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce"} err="failed to get container status \"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce\": rpc error: code = NotFound desc = could not find container \"ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce\": container with ID starting with ec8e78130ada7d213014a2e1cb28c1adb467df734b3f3bcdc32e101235ede0ce not found: ID does not exist" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.573793 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f844962-adc2-4345-b031-a5a7a9e003e3" (UID: "3f844962-adc2-4345-b031-a5a7a9e003e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.576062 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-config-data" (OuterVolumeSpecName: "config-data") pod "3f844962-adc2-4345-b031-a5a7a9e003e3" (UID: "3f844962-adc2-4345-b031-a5a7a9e003e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.598406 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3f844962-adc2-4345-b031-a5a7a9e003e3" (UID: "3f844962-adc2-4345-b031-a5a7a9e003e3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.607867 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3f844962-adc2-4345-b031-a5a7a9e003e3" (UID: "3f844962-adc2-4345-b031-a5a7a9e003e3"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.642534 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8hdg\" (UniqueName: \"kubernetes.io/projected/3f844962-adc2-4345-b031-a5a7a9e003e3-kube-api-access-t8hdg\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.642567 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f844962-adc2-4345-b031-a5a7a9e003e3-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.642596 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.642609 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.642620 4689 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.642632 4689 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f844962-adc2-4345-b031-a5a7a9e003e3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.870950 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.880137 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.902029 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.902521 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-log" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.902548 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-log" Oct 13 21:30:00 crc kubenswrapper[4689]: E1013 21:30:00.902562 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-api" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.902572 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-api" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.902786 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-log" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.902810 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" containerName="nova-api-api" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.903923 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.906000 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.906182 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.906931 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.913208 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.958302 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.958418 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4625bed2-2e08-4399-a0a8-fcb62b4239bc-logs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.958469 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-config-data\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.958658 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.959008 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfq6k\" (UniqueName: \"kubernetes.io/projected/4625bed2-2e08-4399-a0a8-fcb62b4239bc-kube-api-access-wfq6k\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.959081 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:00 crc kubenswrapper[4689]: I1013 21:30:00.978444 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"] Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.061401 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfq6k\" (UniqueName: \"kubernetes.io/projected/4625bed2-2e08-4399-a0a8-fcb62b4239bc-kube-api-access-wfq6k\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.061457 4689 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.061483 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.061534 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4625bed2-2e08-4399-a0a8-fcb62b4239bc-logs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.061560 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-config-data\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.061610 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.062017 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4625bed2-2e08-4399-a0a8-fcb62b4239bc-logs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.067231 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-config-data\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.067741 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.068480 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.071624 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4625bed2-2e08-4399-a0a8-fcb62b4239bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.078723 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfq6k\" 
(UniqueName: \"kubernetes.io/projected/4625bed2-2e08-4399-a0a8-fcb62b4239bc-kube-api-access-wfq6k\") pod \"nova-api-0\" (UID: \"4625bed2-2e08-4399-a0a8-fcb62b4239bc\") " pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.277309 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.534038 4689 generic.go:334] "Generic (PLEG): container finished" podID="30464507-4f21-4110-8238-9698b62b8fe3" containerID="5f8a2c628ff4e194a52ad839c7c45757039e1d6d34184a9a944fe93208f8afe3" exitCode=0 Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.534113 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf" event={"ID":"30464507-4f21-4110-8238-9698b62b8fe3","Type":"ContainerDied","Data":"5f8a2c628ff4e194a52ad839c7c45757039e1d6d34184a9a944fe93208f8afe3"} Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.534145 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf" event={"ID":"30464507-4f21-4110-8238-9698b62b8fe3","Type":"ContainerStarted","Data":"26fff3aa0779aba0f92fc8947f9e94a61b4d073e0bb6b974ef7d1240971207bc"} Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.701503 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 13 21:30:01 crc kubenswrapper[4689]: W1013 21:30:01.704082 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4625bed2_2e08_4399_a0a8_fcb62b4239bc.slice/crio-1178b2c99f4c7cf2f682872dcfd0b367476231d372e582244d0ae8498ebdd8a0 WatchSource:0}: Error finding container 1178b2c99f4c7cf2f682872dcfd0b367476231d372e582244d0ae8498ebdd8a0: Status 404 returned error can't find the container with id 1178b2c99f4c7cf2f682872dcfd0b367476231d372e582244d0ae8498ebdd8a0 Oct 13 21:30:01 crc kubenswrapper[4689]: I1013 21:30:01.881400 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f844962-adc2-4345-b031-a5a7a9e003e3" path="/var/lib/kubelet/pods/3f844962-adc2-4345-b031-a5a7a9e003e3/volumes" Oct 13 21:30:02 crc kubenswrapper[4689]: I1013 21:30:02.557939 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4625bed2-2e08-4399-a0a8-fcb62b4239bc","Type":"ContainerStarted","Data":"e814ce576578c0fd0c2f2b715f41a9a9680f4b78b12788472df3f0fdd48c4853"} Oct 13 21:30:02 crc kubenswrapper[4689]: I1013 21:30:02.558016 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4625bed2-2e08-4399-a0a8-fcb62b4239bc","Type":"ContainerStarted","Data":"c3defb06a3f2c16e6dff513f6518b31eb53da418484afa90e1ce39b8dd0ca24d"} Oct 13 21:30:02 crc kubenswrapper[4689]: I1013 21:30:02.558040 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4625bed2-2e08-4399-a0a8-fcb62b4239bc","Type":"ContainerStarted","Data":"1178b2c99f4c7cf2f682872dcfd0b367476231d372e582244d0ae8498ebdd8a0"} Oct 13 21:30:02 crc kubenswrapper[4689]: I1013 21:30:02.580676 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.580658739 podStartE2EDuration="2.580658739s" podCreationTimestamp="2025-10-13 21:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 
21:30:02.580077846 +0000 UTC m=+1119.498322931" watchObservedRunningTime="2025-10-13 21:30:02.580658739 +0000 UTC m=+1119.498903824" Oct 13 21:30:02 crc kubenswrapper[4689]: I1013 21:30:02.886467 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:50572->10.217.0.196:8775: read: connection reset by peer" Oct 13 21:30:02 crc kubenswrapper[4689]: I1013 21:30:02.887402 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:50570->10.217.0.196:8775: read: connection reset by peer" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.032082 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.199564 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30464507-4f21-4110-8238-9698b62b8fe3-secret-volume\") pod \"30464507-4f21-4110-8238-9698b62b8fe3\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.199759 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30464507-4f21-4110-8238-9698b62b8fe3-config-volume\") pod \"30464507-4f21-4110-8238-9698b62b8fe3\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.199803 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7vn8\" (UniqueName: \"kubernetes.io/projected/30464507-4f21-4110-8238-9698b62b8fe3-kube-api-access-v7vn8\") pod \"30464507-4f21-4110-8238-9698b62b8fe3\" (UID: \"30464507-4f21-4110-8238-9698b62b8fe3\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.200898 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30464507-4f21-4110-8238-9698b62b8fe3-config-volume" (OuterVolumeSpecName: "config-volume") pod "30464507-4f21-4110-8238-9698b62b8fe3" (UID: "30464507-4f21-4110-8238-9698b62b8fe3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.206166 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30464507-4f21-4110-8238-9698b62b8fe3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "30464507-4f21-4110-8238-9698b62b8fe3" (UID: "30464507-4f21-4110-8238-9698b62b8fe3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.206817 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30464507-4f21-4110-8238-9698b62b8fe3-kube-api-access-v7vn8" (OuterVolumeSpecName: "kube-api-access-v7vn8") pod "30464507-4f21-4110-8238-9698b62b8fe3" (UID: "30464507-4f21-4110-8238-9698b62b8fe3"). InnerVolumeSpecName "kube-api-access-v7vn8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.301894 4689 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30464507-4f21-4110-8238-9698b62b8fe3-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.301931 4689 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30464507-4f21-4110-8238-9698b62b8fe3-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.301943 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7vn8\" (UniqueName: \"kubernetes.io/projected/30464507-4f21-4110-8238-9698b62b8fe3-kube-api-access-v7vn8\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.314285 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.508307 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-config-data\") pod \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.508411 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352d4de6-8c1e-462f-a7f6-6c4f2955707d-logs\") pod \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.508444 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmt67\" (UniqueName: \"kubernetes.io/projected/352d4de6-8c1e-462f-a7f6-6c4f2955707d-kube-api-access-pmt67\") pod \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.508487 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-nova-metadata-tls-certs\") pod \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.508726 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-combined-ca-bundle\") pod \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\" (UID: \"352d4de6-8c1e-462f-a7f6-6c4f2955707d\") " Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.509091 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/352d4de6-8c1e-462f-a7f6-6c4f2955707d-logs" (OuterVolumeSpecName: "logs") pod "352d4de6-8c1e-462f-a7f6-6c4f2955707d" (UID: "352d4de6-8c1e-462f-a7f6-6c4f2955707d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.509269 4689 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352d4de6-8c1e-462f-a7f6-6c4f2955707d-logs\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.512123 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/352d4de6-8c1e-462f-a7f6-6c4f2955707d-kube-api-access-pmt67" (OuterVolumeSpecName: "kube-api-access-pmt67") pod "352d4de6-8c1e-462f-a7f6-6c4f2955707d" (UID: "352d4de6-8c1e-462f-a7f6-6c4f2955707d"). InnerVolumeSpecName "kube-api-access-pmt67". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.533680 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-config-data" (OuterVolumeSpecName: "config-data") pod "352d4de6-8c1e-462f-a7f6-6c4f2955707d" (UID: "352d4de6-8c1e-462f-a7f6-6c4f2955707d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.552075 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "352d4de6-8c1e-462f-a7f6-6c4f2955707d" (UID: "352d4de6-8c1e-462f-a7f6-6c4f2955707d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.558062 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "352d4de6-8c1e-462f-a7f6-6c4f2955707d" (UID: "352d4de6-8c1e-462f-a7f6-6c4f2955707d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.568490 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.568489 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf" event={"ID":"30464507-4f21-4110-8238-9698b62b8fe3","Type":"ContainerDied","Data":"26fff3aa0779aba0f92fc8947f9e94a61b4d073e0bb6b974ef7d1240971207bc"} Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.568697 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26fff3aa0779aba0f92fc8947f9e94a61b4d073e0bb6b974ef7d1240971207bc" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.571421 4689 generic.go:334] "Generic (PLEG): container finished" podID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerID="e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544" exitCode=0 Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.572364 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.575873 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"352d4de6-8c1e-462f-a7f6-6c4f2955707d","Type":"ContainerDied","Data":"e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544"} Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.576160 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"352d4de6-8c1e-462f-a7f6-6c4f2955707d","Type":"ContainerDied","Data":"aac86f0b8a0556f23d73d606b6f626b6935e8a4e6ea25c11dae4de5ec526e92d"} Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.576254 4689 scope.go:117] "RemoveContainer" containerID="e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.611924 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.612058 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.612491 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmt67\" (UniqueName: \"kubernetes.io/projected/352d4de6-8c1e-462f-a7f6-6c4f2955707d-kube-api-access-pmt67\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.612528 4689 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/352d4de6-8c1e-462f-a7f6-6c4f2955707d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.638307 4689 scope.go:117] "RemoveContainer" containerID="51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.638769 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.651036 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.665858 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:30:03 crc kubenswrapper[4689]: E1013 21:30:03.666277 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30464507-4f21-4110-8238-9698b62b8fe3" containerName="collect-profiles" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.666290 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="30464507-4f21-4110-8238-9698b62b8fe3" containerName="collect-profiles" Oct 13 21:30:03 crc kubenswrapper[4689]: E1013 21:30:03.666313 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-metadata" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.666319 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-metadata" Oct 13 21:30:03 crc kubenswrapper[4689]: E1013 21:30:03.666335 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" 
containerName="nova-metadata-log" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.666343 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-log" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.668746 4689 scope.go:117] "RemoveContainer" containerID="e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544" Oct 13 21:30:03 crc kubenswrapper[4689]: E1013 21:30:03.669277 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544\": container with ID starting with e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544 not found: ID does not exist" containerID="e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.669325 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544"} err="failed to get container status \"e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544\": rpc error: code = NotFound desc = could not find container \"e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544\": container with ID starting with e5446d890520594e7cd4f87b13adc12cdbe7156e8e05e3a1b8dcac4bb7cf3544 not found: ID does not exist" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.669351 4689 scope.go:117] "RemoveContainer" containerID="51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6" Oct 13 21:30:03 crc kubenswrapper[4689]: E1013 21:30:03.669673 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6\": container with ID starting with 51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6 not found: ID does not exist" containerID="51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.669705 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6"} err="failed to get container status \"51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6\": rpc error: code = NotFound desc = could not find container \"51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6\": container with ID starting with 51a9ee343c97ca23523c01d3267c8b99ba569e26576ef385a31b1c2c483ec1d6 not found: ID does not exist" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.710973 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="30464507-4f21-4110-8238-9698b62b8fe3" containerName="collect-profiles" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.711070 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-metadata" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.711099 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" containerName="nova-metadata-log" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.712623 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.713231 4689 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.715339 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.715400 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.715464 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-config-data\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.715495 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-logs\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.715554 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.715570 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x99c4\" (UniqueName: \"kubernetes.io/projected/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-kube-api-access-x99c4\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.715687 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.816536 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-logs\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.816906 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x99c4\" (UniqueName: \"kubernetes.io/projected/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-kube-api-access-x99c4\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.816989 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.817010 4689 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.817052 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-config-data\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.817388 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-logs\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.821295 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-config-data\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.821300 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.822768 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.842077 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x99c4\" (UniqueName: \"kubernetes.io/projected/dd6f475e-e9b6-421d-9897-1b5a8a748a2a-kube-api-access-x99c4\") pod \"nova-metadata-0\" (UID: \"dd6f475e-e9b6-421d-9897-1b5a8a748a2a\") " pod="openstack/nova-metadata-0" Oct 13 21:30:03 crc kubenswrapper[4689]: I1013 21:30:03.882444 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="352d4de6-8c1e-462f-a7f6-6c4f2955707d" path="/var/lib/kubelet/pods/352d4de6-8c1e-462f-a7f6-6c4f2955707d/volumes" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.045671 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.237786 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.429545 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-config-data\") pod \"67d3cb68-db98-4b94-91d1-bc032bf032bb\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.429880 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-combined-ca-bundle\") pod \"67d3cb68-db98-4b94-91d1-bc032bf032bb\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.430068 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cszz\" (UniqueName: \"kubernetes.io/projected/67d3cb68-db98-4b94-91d1-bc032bf032bb-kube-api-access-2cszz\") pod \"67d3cb68-db98-4b94-91d1-bc032bf032bb\" (UID: \"67d3cb68-db98-4b94-91d1-bc032bf032bb\") " Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.434301 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67d3cb68-db98-4b94-91d1-bc032bf032bb-kube-api-access-2cszz" (OuterVolumeSpecName: "kube-api-access-2cszz") pod "67d3cb68-db98-4b94-91d1-bc032bf032bb" (UID: "67d3cb68-db98-4b94-91d1-bc032bf032bb"). InnerVolumeSpecName "kube-api-access-2cszz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.473081 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67d3cb68-db98-4b94-91d1-bc032bf032bb" (UID: "67d3cb68-db98-4b94-91d1-bc032bf032bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.473508 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-config-data" (OuterVolumeSpecName: "config-data") pod "67d3cb68-db98-4b94-91d1-bc032bf032bb" (UID: "67d3cb68-db98-4b94-91d1-bc032bf032bb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.492381 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.533840 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cszz\" (UniqueName: \"kubernetes.io/projected/67d3cb68-db98-4b94-91d1-bc032bf032bb-kube-api-access-2cszz\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.533904 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.533927 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67d3cb68-db98-4b94-91d1-bc032bf032bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.583563 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd6f475e-e9b6-421d-9897-1b5a8a748a2a","Type":"ContainerStarted","Data":"d8a64e8b883a1d0b2f309922dc9df4a7054ab038abe1b5e21069242548f20403"} Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.587845 4689 generic.go:334] "Generic (PLEG): container finished" podID="67d3cb68-db98-4b94-91d1-bc032bf032bb" containerID="12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" exitCode=0 Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.587889 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.587888 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"67d3cb68-db98-4b94-91d1-bc032bf032bb","Type":"ContainerDied","Data":"12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d"} Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.588005 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"67d3cb68-db98-4b94-91d1-bc032bf032bb","Type":"ContainerDied","Data":"eed87d3020e8e59a6887753c25ebec3c0647e655279d94ec401601ce308865f8"} Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.588026 4689 scope.go:117] "RemoveContainer" containerID="12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.608853 4689 scope.go:117] "RemoveContainer" containerID="12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" Oct 13 21:30:04 crc kubenswrapper[4689]: E1013 21:30:04.609401 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d\": container with ID starting with 12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d not found: ID does not exist" containerID="12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.609453 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d"} err="failed to get container status \"12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d\": rpc error: code = NotFound desc = could 
not find container \"12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d\": container with ID starting with 12722b2d017d6733423863d3726de683de10f8d204b46b6c49e9fd1eeca1e63d not found: ID does not exist" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.618811 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.642724 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.663602 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:30:04 crc kubenswrapper[4689]: E1013 21:30:04.664086 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67d3cb68-db98-4b94-91d1-bc032bf032bb" containerName="nova-scheduler-scheduler" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.664108 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="67d3cb68-db98-4b94-91d1-bc032bf032bb" containerName="nova-scheduler-scheduler" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.664345 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="67d3cb68-db98-4b94-91d1-bc032bf032bb" containerName="nova-scheduler-scheduler" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.665051 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.666742 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.685002 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.737234 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06de4aaa-8949-449e-bb2f-65f4cffa4954-config-data\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.737338 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zm7k\" (UniqueName: \"kubernetes.io/projected/06de4aaa-8949-449e-bb2f-65f4cffa4954-kube-api-access-7zm7k\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.737376 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06de4aaa-8949-449e-bb2f-65f4cffa4954-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.838777 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zm7k\" (UniqueName: \"kubernetes.io/projected/06de4aaa-8949-449e-bb2f-65f4cffa4954-kube-api-access-7zm7k\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.838887 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
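The RemoveContainer / "ContainerStatus from runtime service failed" pair above is the usual deletion race: the kubelet asks CRI-O for the status of a container it has just removed, gets NotFound, and logs it at error level even though nothing is wrong. A NotFound is only suspicious when the kubelet never issued a RemoveContainer for that ID first. Below is a minimal checker for that invariant, a Python sketch over a saved kubelet.log (stdlib only; the regexes assume the exact record formats shown above):

    import re
    import sys

    # Matches: "RemoveContainer" containerID="<64-hex>"
    REMOVE = re.compile(r'"RemoveContainer" containerID="([0-9a-f]{64})"')
    # Matches: "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"<64-hex>"}
    NOT_FOUND = re.compile(r'"DeleteContainer returned error" containerID=\{"Type":"cri-o","ID":"([0-9a-f]{64})"\}')

    removed = set()
    for line in sys.stdin:
        m = REMOVE.search(line)
        if m:
            removed.add(m.group(1))
            continue
        m = NOT_FOUND.search(line)
        if m and m.group(1) not in removed:
            # A NotFound with no prior RemoveContainer would be worth a closer look.
            print("unexpected NotFound:", m.group(1)[:13])

Run as python3 check_delete_race.py < kubelet.log (file name hypothetical); on this log it prints nothing, which is the expected, benign outcome.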
\"kubernetes.io/secret/06de4aaa-8949-449e-bb2f-65f4cffa4954-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.838976 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06de4aaa-8949-449e-bb2f-65f4cffa4954-config-data\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.843038 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06de4aaa-8949-449e-bb2f-65f4cffa4954-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.843046 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06de4aaa-8949-449e-bb2f-65f4cffa4954-config-data\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.859855 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zm7k\" (UniqueName: \"kubernetes.io/projected/06de4aaa-8949-449e-bb2f-65f4cffa4954-kube-api-access-7zm7k\") pod \"nova-scheduler-0\" (UID: \"06de4aaa-8949-449e-bb2f-65f4cffa4954\") " pod="openstack/nova-scheduler-0" Oct 13 21:30:04 crc kubenswrapper[4689]: I1013 21:30:04.984941 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 13 21:30:05 crc kubenswrapper[4689]: I1013 21:30:05.438323 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 13 21:30:05 crc kubenswrapper[4689]: W1013 21:30:05.442033 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06de4aaa_8949_449e_bb2f_65f4cffa4954.slice/crio-668f7eb084830adf95c580d432efe850fddcb716bf6ac76f263774f603b8914b WatchSource:0}: Error finding container 668f7eb084830adf95c580d432efe850fddcb716bf6ac76f263774f603b8914b: Status 404 returned error can't find the container with id 668f7eb084830adf95c580d432efe850fddcb716bf6ac76f263774f603b8914b Oct 13 21:30:05 crc kubenswrapper[4689]: I1013 21:30:05.600114 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"06de4aaa-8949-449e-bb2f-65f4cffa4954","Type":"ContainerStarted","Data":"668f7eb084830adf95c580d432efe850fddcb716bf6ac76f263774f603b8914b"} Oct 13 21:30:05 crc kubenswrapper[4689]: I1013 21:30:05.601740 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd6f475e-e9b6-421d-9897-1b5a8a748a2a","Type":"ContainerStarted","Data":"475f57ebf6e0a11c1cd65b0986a619ea7312061084fb70a69eb3a86d4937603d"} Oct 13 21:30:05 crc kubenswrapper[4689]: I1013 21:30:05.601852 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd6f475e-e9b6-421d-9897-1b5a8a748a2a","Type":"ContainerStarted","Data":"a6db1c2abc3b99502411d156d1d25ec5184ac40210cc331fc8635f006dea6e05"} Oct 13 21:30:05 crc kubenswrapper[4689]: I1013 21:30:05.629309 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-metadata-0" podStartSLOduration=2.629293598 podStartE2EDuration="2.629293598s" podCreationTimestamp="2025-10-13 21:30:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:30:05.616049626 +0000 UTC m=+1122.534294721" watchObservedRunningTime="2025-10-13 21:30:05.629293598 +0000 UTC m=+1122.547538683" Oct 13 21:30:05 crc kubenswrapper[4689]: I1013 21:30:05.878041 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67d3cb68-db98-4b94-91d1-bc032bf032bb" path="/var/lib/kubelet/pods/67d3cb68-db98-4b94-91d1-bc032bf032bb/volumes" Oct 13 21:30:06 crc kubenswrapper[4689]: I1013 21:30:06.615197 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"06de4aaa-8949-449e-bb2f-65f4cffa4954","Type":"ContainerStarted","Data":"e07603258637ee5a0b852971b208332a1d8f9cf5bffff865069522c02783c576"} Oct 13 21:30:06 crc kubenswrapper[4689]: I1013 21:30:06.643366 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.643342378 podStartE2EDuration="2.643342378s" podCreationTimestamp="2025-10-13 21:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:30:06.64173986 +0000 UTC m=+1123.559984975" watchObservedRunningTime="2025-10-13 21:30:06.643342378 +0000 UTC m=+1123.561587463" Oct 13 21:30:09 crc kubenswrapper[4689]: I1013 21:30:09.046682 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 21:30:09 crc kubenswrapper[4689]: I1013 21:30:09.047044 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 13 21:30:09 crc kubenswrapper[4689]: I1013 21:30:09.986027 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 13 21:30:11 crc kubenswrapper[4689]: I1013 21:30:11.278259 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 21:30:11 crc kubenswrapper[4689]: I1013 21:30:11.278674 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 13 21:30:12 crc kubenswrapper[4689]: I1013 21:30:12.296867 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4625bed2-2e08-4399-a0a8-fcb62b4239bc" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.208:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 13 21:30:12 crc kubenswrapper[4689]: I1013 21:30:12.296877 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4625bed2-2e08-4399-a0a8-fcb62b4239bc" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.208:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 13 21:30:14 crc kubenswrapper[4689]: I1013 21:30:14.047079 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 13 21:30:14 crc kubenswrapper[4689]: I1013 21:30:14.047333 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 13 21:30:14 crc kubenswrapper[4689]: I1013 21:30:14.986709 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-scheduler-0" Oct 13 21:30:15 crc kubenswrapper[4689]: I1013 21:30:15.020382 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 13 21:30:15 crc kubenswrapper[4689]: I1013 21:30:15.058738 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="dd6f475e-e9b6-421d-9897-1b5a8a748a2a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 13 21:30:15 crc kubenswrapper[4689]: I1013 21:30:15.058760 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="dd6f475e-e9b6-421d-9897-1b5a8a748a2a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 13 21:30:15 crc kubenswrapper[4689]: I1013 21:30:15.742302 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 13 21:30:20 crc kubenswrapper[4689]: I1013 21:30:20.970987 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 13 21:30:21 crc kubenswrapper[4689]: I1013 21:30:21.285742 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 13 21:30:21 crc kubenswrapper[4689]: I1013 21:30:21.286317 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 13 21:30:21 crc kubenswrapper[4689]: I1013 21:30:21.286839 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 13 21:30:21 crc kubenswrapper[4689]: I1013 21:30:21.292673 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 13 21:30:21 crc kubenswrapper[4689]: I1013 21:30:21.771380 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 13 21:30:21 crc kubenswrapper[4689]: I1013 21:30:21.777788 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 13 21:30:24 crc kubenswrapper[4689]: I1013 21:30:24.051656 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 13 21:30:24 crc kubenswrapper[4689]: I1013 21:30:24.053954 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 13 21:30:24 crc kubenswrapper[4689]: I1013 21:30:24.057265 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 13 21:30:24 crc kubenswrapper[4689]: I1013 21:30:24.802054 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 13 21:30:32 crc kubenswrapper[4689]: I1013 21:30:32.298563 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 13 21:30:33 crc kubenswrapper[4689]: I1013 21:30:33.115901 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:30:36 crc kubenswrapper[4689]: I1013 21:30:36.304313 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerName="rabbitmq" 
containerID="cri-o://5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a" gracePeriod=604796 Oct 13 21:30:36 crc kubenswrapper[4689]: I1013 21:30:36.449935 4689 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Oct 13 21:30:37 crc kubenswrapper[4689]: I1013 21:30:37.494146 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerName="rabbitmq" containerID="cri-o://e181ed88165040f3f0ece477fd380d3b5203e300d2172dfa5d1a168538ec6a3f" gracePeriod=604796 Oct 13 21:30:42 crc kubenswrapper[4689]: I1013 21:30:42.939855 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 13 21:30:42 crc kubenswrapper[4689]: I1013 21:30:42.978430 4689 generic.go:334] "Generic (PLEG): container finished" podID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerID="5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a" exitCode=0 Oct 13 21:30:42 crc kubenswrapper[4689]: I1013 21:30:42.978525 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 13 21:30:42 crc kubenswrapper[4689]: I1013 21:30:42.978510 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4fa622b7-d774-4b55-a3e7-2053625177ca","Type":"ContainerDied","Data":"5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a"} Oct 13 21:30:42 crc kubenswrapper[4689]: I1013 21:30:42.979134 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4fa622b7-d774-4b55-a3e7-2053625177ca","Type":"ContainerDied","Data":"380b9294d9531efa033fee30a5d847a46b3036637a1fd104f40125feabf20b72"} Oct 13 21:30:42 crc kubenswrapper[4689]: I1013 21:30:42.979163 4689 scope.go:117] "RemoveContainer" containerID="5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a" Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.020951 4689 scope.go:117] "RemoveContainer" containerID="6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762" Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.052972 4689 scope.go:117] "RemoveContainer" containerID="5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a" Oct 13 21:30:43 crc kubenswrapper[4689]: E1013 21:30:43.053734 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a\": container with ID starting with 5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a not found: ID does not exist" containerID="5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a" Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.053788 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a"} err="failed to get container status \"5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a\": rpc error: code = NotFound desc = could not find container \"5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a\": container with ID starting with 5eb926dc5ecd9f988b8a26cc3c8d8a45f55fd8ea1687ce1aa3db23ea76500b7a 
not found: ID does not exist" Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.053815 4689 scope.go:117] "RemoveContainer" containerID="6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762" Oct 13 21:30:43 crc kubenswrapper[4689]: E1013 21:30:43.055334 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762\": container with ID starting with 6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762 not found: ID does not exist" containerID="6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762" Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.055366 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762"} err="failed to get container status \"6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762\": rpc error: code = NotFound desc = could not find container \"6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762\": container with ID starting with 6d93e24af37eb1e0abc87c994aeea6fb5043a90cb62d744977d5530b09573762 not found: ID does not exist" Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080084 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4fa622b7-d774-4b55-a3e7-2053625177ca-erlang-cookie-secret\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080666 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8fsz\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-kube-api-access-c8fsz\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080720 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-erlang-cookie\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080767 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-plugins\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080793 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-confd\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080824 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-config-data\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") " Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080909 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume 
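Both RabbitMQ pods are killed with gracePeriod=604796. 604800 s is exactly 7 days; a terminationGracePeriodSeconds of 604800 is what the RabbitMQ cluster operator sets on its pods by default (an assumption here, since the pod spec itself is not in this log). The logged value is that budget minus the roughly 4 s that passed between each SyncLoop DELETE (21:30:32 / 21:30:33) and the corresponding kill (21:30:36 / 21:30:37):

    # Sanity check of the logged grace period. All values are from the log above,
    # except the 7-day spec value, which is assumed.
    spec_grace = 7 * 24 * 60 * 60        # 604800 s
    elapsed = 36 - 32                    # DELETE at 21:30:32, kill at 21:30:36
    print(spec_grace - elapsed)          # -> 604796, matching gracePeriod=604796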
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080084 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4fa622b7-d774-4b55-a3e7-2053625177ca-erlang-cookie-secret\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080666 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8fsz\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-kube-api-access-c8fsz\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080720 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-erlang-cookie\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080767 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-plugins\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080793 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-confd\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080824 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-config-data\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080909 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-plugins-conf\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080936 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.080973 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-server-conf\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.081375 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.081439 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.081545 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-tls\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.081749 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.082023 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4fa622b7-d774-4b55-a3e7-2053625177ca-pod-info\") pod \"4fa622b7-d774-4b55-a3e7-2053625177ca\" (UID: \"4fa622b7-d774-4b55-a3e7-2053625177ca\") "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.082922 4689 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.082947 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.082965 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.087683 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4fa622b7-d774-4b55-a3e7-2053625177ca-pod-info" (OuterVolumeSpecName: "pod-info") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.089902 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.090505 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-kube-api-access-c8fsz" (OuterVolumeSpecName: "kube-api-access-c8fsz") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "kube-api-access-c8fsz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.097358 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.097777 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fa622b7-d774-4b55-a3e7-2053625177ca-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.111433 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-config-data" (OuterVolumeSpecName: "config-data") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.160540 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-server-conf" (OuterVolumeSpecName: "server-conf") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.185084 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.185160 4689 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.185173 4689 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4fa622b7-d774-4b55-a3e7-2053625177ca-server-conf\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.185183 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.185193 4689 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4fa622b7-d774-4b55-a3e7-2053625177ca-pod-info\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.185213 4689 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4fa622b7-d774-4b55-a3e7-2053625177ca-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.185224 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8fsz\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-kube-api-access-c8fsz\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.203243 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4fa622b7-d774-4b55-a3e7-2053625177ca" (UID: "4fa622b7-d774-4b55-a3e7-2053625177ca"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.207344 4689 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.286620 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4fa622b7-d774-4b55-a3e7-2053625177ca-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.286663 4689 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.315548 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.329842 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.340864 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 21:30:43 crc kubenswrapper[4689]: E1013 21:30:43.376923 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerName="rabbitmq"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.376966 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerName="rabbitmq"
Oct 13 21:30:43 crc kubenswrapper[4689]: E1013 21:30:43.377006 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerName="setup-container"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.377014 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerName="setup-container"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.377264 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" containerName="rabbitmq"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.378278 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.378367 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.380550 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.380579 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.380611 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.380826 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.381045 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.382200 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-l624j"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.387532 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.489721 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-server-conf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.489869 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.489905 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-config-data\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.489935 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.490017 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.490114 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.490136 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de9fccf5-fe48-498b-a6db-15e734aa9e61-pod-info\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.490212 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.490238 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.490322 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de9fccf5-fe48-498b-a6db-15e734aa9e61-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.490375 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmjsf\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-kube-api-access-wmjsf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.592346 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.592633 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.592789 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de9fccf5-fe48-498b-a6db-15e734aa9e61-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.592890 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmjsf\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-kube-api-access-wmjsf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.593006 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-server-conf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.593427 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.594199 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.594246 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.594304 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-config-data\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.594513 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-server-conf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.594671 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.595020 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de9fccf5-fe48-498b-a6db-15e734aa9e61-config-data\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.595051 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.595094 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.595215 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.595333 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.595354 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de9fccf5-fe48-498b-a6db-15e734aa9e61-pod-info\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.596215 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de9fccf5-fe48-498b-a6db-15e734aa9e61-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.596295 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.602110 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de9fccf5-fe48-498b-a6db-15e734aa9e61-pod-info\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.602370 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.611238 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmjsf\" (UniqueName: \"kubernetes.io/projected/de9fccf5-fe48-498b-a6db-15e734aa9e61-kube-api-access-wmjsf\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.628412 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"de9fccf5-fe48-498b-a6db-15e734aa9e61\") " pod="openstack/rabbitmq-server-0"
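Note that the recreated rabbitmq-server-0 is a brand-new pod object: same name, but UID de9fccf5-fe48-498b-a6db-15e734aa9e61 instead of 4fa622b7-d774-4b55-a3e7-2053625177ca, so every per-pod UniqueName above changes with it. Only the node-scoped local PV (local-storage10-crc, mounted at /mnt/openstack/pv10) is re-attached, which is how the new instance keeps its on-disk state. A sketch that groups volume names by pod UID to make the handover visible (the UniqueName layout kubernetes.io/<plugin>/<pod-uid>-<name> is taken from the records above; the local-volume UniqueName carries no pod UID and is skipped):

    import collections
    import re
    import sys

    U = re.compile(r'UniqueName: \\"kubernetes\.io/[^/]+/([0-9a-f-]{36})-([^\\]+)\\"')

    vols = collections.defaultdict(set)
    for line in sys.stdin:
        for uid, name in U.findall(line):
            vols[uid].add(name)

    for uid in sorted(vols):
        print(uid, "->", ", ".join(sorted(vols[uid])))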
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.711751 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.885822 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fa622b7-d774-4b55-a3e7-2053625177ca" path="/var/lib/kubelet/pods/4fa622b7-d774-4b55-a3e7-2053625177ca/volumes"
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.997885 4689 generic.go:334] "Generic (PLEG): container finished" podID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerID="e181ed88165040f3f0ece477fd380d3b5203e300d2172dfa5d1a168538ec6a3f" exitCode=0
Oct 13 21:30:43 crc kubenswrapper[4689]: I1013 21:30:43.997929 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b974f9f4-057e-4a9c-9835-a9636d5601f8","Type":"ContainerDied","Data":"e181ed88165040f3f0ece477fd380d3b5203e300d2172dfa5d1a168538ec6a3f"}
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.031091 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207020 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207394 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b974f9f4-057e-4a9c-9835-a9636d5601f8-pod-info\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207461 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-confd\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207489 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r59sq\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-kube-api-access-r59sq\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207510 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-server-conf\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207575 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-config-data\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207618 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-plugins-conf\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207641 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b974f9f4-057e-4a9c-9835-a9636d5601f8-erlang-cookie-secret\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207658 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-tls\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207723 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-plugins\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.207748 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-erlang-cookie\") pod \"b974f9f4-057e-4a9c-9835-a9636d5601f8\" (UID: \"b974f9f4-057e-4a9c-9835-a9636d5601f8\") "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.208748 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.209173 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.209367 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.212718 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.222278 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b974f9f4-057e-4a9c-9835-a9636d5601f8-pod-info" (OuterVolumeSpecName: "pod-info") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.222404 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-kube-api-access-r59sq" (OuterVolumeSpecName: "kube-api-access-r59sq") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "kube-api-access-r59sq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.224800 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b974f9f4-057e-4a9c-9835-a9636d5601f8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.225696 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.263117 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.274058 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-config-data" (OuterVolumeSpecName: "config-data") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.278888 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-server-conf" (OuterVolumeSpecName: "server-conf") pod "b974f9f4-057e-4a9c-9835-a9636d5601f8" (UID: "b974f9f4-057e-4a9c-9835-a9636d5601f8"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309828 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309860 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309889 4689 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309901 4689 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b974f9f4-057e-4a9c-9835-a9636d5601f8-pod-info\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309910 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r59sq\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-kube-api-access-r59sq\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309919 4689 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-server-conf\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309929 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309937 4689 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b974f9f4-057e-4a9c-9835-a9636d5601f8-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309947 4689 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b974f9f4-057e-4a9c-9835-a9636d5601f8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.309955 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.330686 4689 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.411046 4689 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b974f9f4-057e-4a9c-9835-a9636d5601f8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:44 crc kubenswrapper[4689]: I1013 21:30:44.411070 4689 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.007303 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b974f9f4-057e-4a9c-9835-a9636d5601f8","Type":"ContainerDied","Data":"28a77c2c0807a56dc6a58581f42cf5b777fc9f31422f5f4ad91ce923c8f0d871"} Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.007652 4689 scope.go:117] "RemoveContainer" containerID="e181ed88165040f3f0ece477fd380d3b5203e300d2172dfa5d1a168538ec6a3f" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.007348 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.008488 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"de9fccf5-fe48-498b-a6db-15e734aa9e61","Type":"ContainerStarted","Data":"a44e758b27d2cfa7e9d31e1946b7f1d7831e8208128d4092c1eb79f5462d0e6b"} Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.067875 4689 scope.go:117] "RemoveContainer" containerID="00004912b447434369b82694f633ce610c7441a9cb15a89d73edb1d7eb308492" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.075710 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.097986 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.109340 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:30:45 crc kubenswrapper[4689]: E1013 21:30:45.109910 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerName="rabbitmq" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.109930 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerName="rabbitmq" Oct 13 21:30:45 crc kubenswrapper[4689]: E1013 21:30:45.109946 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerName="setup-container" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.109952 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerName="setup-container" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.110139 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="b974f9f4-057e-4a9c-9835-a9636d5601f8" containerName="rabbitmq" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.111216 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.114739 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.114856 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.115083 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.115965 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.120001 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.120307 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-m8fn9" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.120323 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.120375 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231118 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231167 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231202 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231241 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231288 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231334 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231366 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231389 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231425 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231497 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.231524 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q6fq\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-kube-api-access-6q6fq\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332598 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332651 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332670 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332702 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" 
(UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332759 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332779 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q6fq\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-kube-api-access-6q6fq\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332812 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332842 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332863 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332893 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.332959 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.333292 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.333437 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") 
device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.333819 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.334062 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.334230 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.335006 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.380646 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.380969 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.381113 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.382709 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.398961 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q6fq\" (UniqueName: \"kubernetes.io/projected/4448f3de-e179-4a5c-8a6d-dd16b725bb0c-kube-api-access-6q6fq\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.416024 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"4448f3de-e179-4a5c-8a6d-dd16b725bb0c\") " pod="openstack/rabbitmq-cell1-server-0"
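Note on the mount sequence above: every volume of the replacement rabbitmq-cell1-server-0 pod walks the same steps, "VerifyControllerAttachedVolume started", "MountVolume started", then "MountVolume.SetUp succeeded" (with an extra "MountVolume.MountDevice succeeded" for the local PV). A volume whose "started" entry never gets a matching "succeeded" entry is typically where a pod stuck in ContainerCreating is blocked. A rough sketch under the same assumptions as the earlier snippet (Python 3.8+, local log copy):

import re
import sys

STARTED = re.compile(r'operationExecutor\.MountVolume started for volume \\?"([^"\\]+)')
SUCCEEDED = re.compile(r'MountVolume\.SetUp succeeded for volume \\?"([^"\\]+)')

started, succeeded = set(), set()
with open(sys.argv[1]) as f:
    for line in f:
        if (m := STARTED.search(line)):
            started.add(m.group(1))
        if (m := SUCCEEDED.search(line)):
            succeeded.add(m.group(1))

# Volumes that began mounting but never reported success.
for vol in sorted(started - succeeded):
    print(f"volume {vol}: MountVolume started but SetUp never succeeded")

Here all eleven volumes reach "SetUp succeeded" within roughly 80 ms, so the set difference is empty.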
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.535441 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.881717 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b974f9f4-057e-4a9c-9835-a9636d5601f8" path="/var/lib/kubelet/pods/b974f9f4-057e-4a9c-9835-a9636d5601f8/volumes"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.943396 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"]
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.945506 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.953570 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Oct 13 21:30:45 crc kubenswrapper[4689]: I1013 21:30:45.955561 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"]
Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.017557 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"de9fccf5-fe48-498b-a6db-15e734aa9e61","Type":"ContainerStarted","Data":"9f7da4b6a0568fd0bcbf5d1e8c2a68b601a8c068c5b08b9d32dc733b3b6cae92"}
Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.048302 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.051888 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"
Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.051946 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"
Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.051973 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"
Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.052014 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"
Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.052071 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.052102 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thw27\" (UniqueName: \"kubernetes.io/projected/73cc21a9-5896-45f8-982e-8e44bccdd5ab-kube-api-access-thw27\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.052173 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-config\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.090507 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"] Oct 13 21:30:46 crc kubenswrapper[4689]: E1013 21:30:46.091255 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-thw27 openstack-edpm-ipam ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" podUID="73cc21a9-5896-45f8-982e-8e44bccdd5ab" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.107711 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55478c4467-vrcxh"] Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.109318 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.127455 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-vrcxh"] Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.158442 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.158508 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.158529 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.158927 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.158959 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.159053 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.159078 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thw27\" (UniqueName: \"kubernetes.io/projected/73cc21a9-5896-45f8-982e-8e44bccdd5ab-kube-api-access-thw27\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.159134 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-config\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.159884 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.159902 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.160143 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.160535 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-config\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.160771 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.178024 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thw27\" (UniqueName: \"kubernetes.io/projected/73cc21a9-5896-45f8-982e-8e44bccdd5ab-kube-api-access-thw27\") pod \"dnsmasq-dns-79bd4cc8c9-nfbw5\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.260765 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.260819 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-config\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.260953 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmq5k\" (UniqueName: \"kubernetes.io/projected/0c646c73-577a-42ba-8aa7-39bac477cb15-kube-api-access-mmq5k\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.260981 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" 
(UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.261082 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.261133 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.261167 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-dns-svc\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.362491 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.362563 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.362615 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-dns-svc\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.362653 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.362675 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-config\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.362721 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmq5k\" (UniqueName: 
\"kubernetes.io/projected/0c646c73-577a-42ba-8aa7-39bac477cb15-kube-api-access-mmq5k\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.362738 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.363649 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.363720 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.363958 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.364091 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.364132 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-config\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.364283 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c646c73-577a-42ba-8aa7-39bac477cb15-dns-svc\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.380294 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmq5k\" (UniqueName: \"kubernetes.io/projected/0c646c73-577a-42ba-8aa7-39bac477cb15-kube-api-access-mmq5k\") pod \"dnsmasq-dns-55478c4467-vrcxh\" (UID: \"0c646c73-577a-42ba-8aa7-39bac477cb15\") " pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:46 crc kubenswrapper[4689]: I1013 21:30:46.503644 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.033194 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.034165 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4448f3de-e179-4a5c-8a6d-dd16b725bb0c","Type":"ContainerStarted","Data":"3b973985f5c893d2f49e04a02c7f50fdf2f47b1a13785c3f0f0ccd4fb86676a3"} Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.047060 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.075255 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thw27\" (UniqueName: \"kubernetes.io/projected/73cc21a9-5896-45f8-982e-8e44bccdd5ab-kube-api-access-thw27\") pod \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.075347 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-config\") pod \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.075407 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-openstack-edpm-ipam\") pod \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.075613 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-nb\") pod \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.075658 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-sb\") pod \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.076207 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "73cc21a9-5896-45f8-982e-8e44bccdd5ab" (UID: "73cc21a9-5896-45f8-982e-8e44bccdd5ab"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.076228 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "73cc21a9-5896-45f8-982e-8e44bccdd5ab" (UID: "73cc21a9-5896-45f8-982e-8e44bccdd5ab"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.076274 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "73cc21a9-5896-45f8-982e-8e44bccdd5ab" (UID: "73cc21a9-5896-45f8-982e-8e44bccdd5ab"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.076290 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-config" (OuterVolumeSpecName: "config") pod "73cc21a9-5896-45f8-982e-8e44bccdd5ab" (UID: "73cc21a9-5896-45f8-982e-8e44bccdd5ab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.076378 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-swift-storage-0\") pod \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.076480 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-svc\") pod \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\" (UID: \"73cc21a9-5896-45f8-982e-8e44bccdd5ab\") " Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.076822 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "73cc21a9-5896-45f8-982e-8e44bccdd5ab" (UID: "73cc21a9-5896-45f8-982e-8e44bccdd5ab"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.077107 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "73cc21a9-5896-45f8-982e-8e44bccdd5ab" (UID: "73cc21a9-5896-45f8-982e-8e44bccdd5ab"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.078318 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.078341 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.078356 4689 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.078370 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.078381 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.078430 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/73cc21a9-5896-45f8-982e-8e44bccdd5ab-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.080825 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73cc21a9-5896-45f8-982e-8e44bccdd5ab-kube-api-access-thw27" (OuterVolumeSpecName: "kube-api-access-thw27") pod "73cc21a9-5896-45f8-982e-8e44bccdd5ab" (UID: "73cc21a9-5896-45f8-982e-8e44bccdd5ab"). InnerVolumeSpecName "kube-api-access-thw27". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.180595 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thw27\" (UniqueName: \"kubernetes.io/projected/73cc21a9-5896-45f8-982e-8e44bccdd5ab-kube-api-access-thw27\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:47 crc kubenswrapper[4689]: I1013 21:30:47.701142 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-vrcxh"] Oct 13 21:30:48 crc kubenswrapper[4689]: I1013 21:30:48.053921 4689 generic.go:334] "Generic (PLEG): container finished" podID="0c646c73-577a-42ba-8aa7-39bac477cb15" containerID="204302b9e1a1fed5a40a5224b7ceb86e5945a2aff9831220bc8f241abb0cb2a7" exitCode=0 Oct 13 21:30:48 crc kubenswrapper[4689]: I1013 21:30:48.053981 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" event={"ID":"0c646c73-577a-42ba-8aa7-39bac477cb15","Type":"ContainerDied","Data":"204302b9e1a1fed5a40a5224b7ceb86e5945a2aff9831220bc8f241abb0cb2a7"} Oct 13 21:30:48 crc kubenswrapper[4689]: I1013 21:30:48.054264 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" event={"ID":"0c646c73-577a-42ba-8aa7-39bac477cb15","Type":"ContainerStarted","Data":"d8cbab48fe8eaac35833447aefb13d16f02e0a83e76b3fb9113ec47c284ec170"} Oct 13 21:30:48 crc kubenswrapper[4689]: I1013 21:30:48.058801 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4448f3de-e179-4a5c-8a6d-dd16b725bb0c","Type":"ContainerStarted","Data":"d515333b0e3274da17eff491a100dc5d5752b9f8e8d2eb26e3a3e5dc4641194c"} Oct 13 21:30:48 crc kubenswrapper[4689]: I1013 21:30:48.058853 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-nfbw5" Oct 13 21:30:48 crc kubenswrapper[4689]: I1013 21:30:48.248150 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"] Oct 13 21:30:48 crc kubenswrapper[4689]: I1013 21:30:48.259985 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-nfbw5"] Oct 13 21:30:49 crc kubenswrapper[4689]: I1013 21:30:49.076779 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" event={"ID":"0c646c73-577a-42ba-8aa7-39bac477cb15","Type":"ContainerStarted","Data":"d8866994a4529c746af3415a202dd5ca88974d9e7bc26a34f3b1bc9785046ff4"} Oct 13 21:30:49 crc kubenswrapper[4689]: I1013 21:30:49.101582 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" podStartSLOduration=3.101562045 podStartE2EDuration="3.101562045s" podCreationTimestamp="2025-10-13 21:30:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:30:49.092154132 +0000 UTC m=+1166.010399227" watchObservedRunningTime="2025-10-13 21:30:49.101562045 +0000 UTC m=+1166.019807130" Oct 13 21:30:49 crc kubenswrapper[4689]: I1013 21:30:49.881840 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73cc21a9-5896-45f8-982e-8e44bccdd5ab" path="/var/lib/kubelet/pods/73cc21a9-5896-45f8-982e-8e44bccdd5ab/volumes" Oct 13 21:30:50 crc kubenswrapper[4689]: I1013 21:30:50.086438 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:56 crc kubenswrapper[4689]: I1013 21:30:56.504926 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55478c4467-vrcxh" Oct 13 21:30:56 crc kubenswrapper[4689]: I1013 21:30:56.584996 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-gkk99"] Oct 13 21:30:56 crc kubenswrapper[4689]: I1013 21:30:56.585379 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" podUID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerName="dnsmasq-dns" containerID="cri-o://4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb" gracePeriod=10 Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.162448 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.164031 4689 generic.go:334] "Generic (PLEG): container finished" podID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerID="4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb" exitCode=0 Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.164073 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" event={"ID":"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa","Type":"ContainerDied","Data":"4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb"} Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.164102 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" event={"ID":"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa","Type":"ContainerDied","Data":"329d1acaa977109e76eea07f0c7ebd3c713341bad522401a4e6e547efe615c9e"} Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.164120 4689 scope.go:117] "RemoveContainer" containerID="4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.187892 4689 scope.go:117] "RemoveContainer" containerID="2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.208535 4689 scope.go:117] "RemoveContainer" containerID="4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb" Oct 13 21:30:57 crc kubenswrapper[4689]: E1013 21:30:57.208935 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb\": container with ID starting with 4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb not found: ID does not exist" containerID="4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.209017 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb"} err="failed to get container status \"4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb\": rpc error: code = NotFound desc = could not find container \"4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb\": container with ID starting with 4e866764e4423b401700ef7b154a4db96d7e8759e97e912f79146763888c88eb not found: ID does not exist" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.209061 4689 scope.go:117] "RemoveContainer" containerID="2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f" Oct 13 21:30:57 crc kubenswrapper[4689]: E1013 21:30:57.209396 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f\": container with ID starting with 2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f not found: ID does not exist" containerID="2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.209428 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f"} err="failed to get container status \"2b5daf233bf8dda8d4106390cd17bacd27201c734f6ce8ba349c93e34d93740f\": rpc error: code = NotFound desc 
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.334066 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-nb\") pod \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") "
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.334224 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-684dh\" (UniqueName: \"kubernetes.io/projected/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-kube-api-access-684dh\") pod \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") "
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.334891 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-config\") pod \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") "
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.334959 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-sb\") pod \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") "
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.335020 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-swift-storage-0\") pod \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") "
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.335067 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-svc\") pod \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\" (UID: \"0c640abb-53ea-4a06-80b4-5e3d8df8f0fa\") "
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.339991 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-kube-api-access-684dh" (OuterVolumeSpecName: "kube-api-access-684dh") pod "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" (UID: "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa"). InnerVolumeSpecName "kube-api-access-684dh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.384566 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" (UID: "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa"). InnerVolumeSpecName "dns-swift-storage-0".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.389146 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" (UID: "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.393084 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" (UID: "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.394412 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-config" (OuterVolumeSpecName: "config") pod "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" (UID: "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.398135 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" (UID: "0c640abb-53ea-4a06-80b4-5e3d8df8f0fa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.436209 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-684dh\" (UniqueName: \"kubernetes.io/projected/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-kube-api-access-684dh\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.436260 4689 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-config\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.436273 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.436281 4689 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.436290 4689 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:57 crc kubenswrapper[4689]: I1013 21:30:57.436299 4689 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 13 21:30:58 crc kubenswrapper[4689]: I1013 21:30:58.180739 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-gkk99" Oct 13 21:30:58 crc kubenswrapper[4689]: I1013 21:30:58.204922 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-gkk99"] Oct 13 21:30:58 crc kubenswrapper[4689]: I1013 21:30:58.212932 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-gkk99"] Oct 13 21:30:59 crc kubenswrapper[4689]: I1013 21:30:59.878815 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" path="/var/lib/kubelet/pods/0c640abb-53ea-4a06-80b4-5e3d8df8f0fa/volumes" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.748351 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q"] Oct 13 21:31:08 crc kubenswrapper[4689]: E1013 21:31:08.749446 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerName="init" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.749464 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerName="init" Oct 13 21:31:08 crc kubenswrapper[4689]: E1013 21:31:08.749483 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerName="dnsmasq-dns" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.749490 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerName="dnsmasq-dns" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.749754 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c640abb-53ea-4a06-80b4-5e3d8df8f0fa" containerName="dnsmasq-dns" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.750629 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.753070 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.753505 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.753670 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.765299 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.775228 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd89b\" (UniqueName: \"kubernetes.io/projected/44571d1c-f8f4-442a-ac47-51d05df37bfc-kube-api-access-wd89b\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.775294 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.775410 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.775505 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.776075 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q"] Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.877321 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.877385 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd89b\" (UniqueName: \"kubernetes.io/projected/44571d1c-f8f4-442a-ac47-51d05df37bfc-kube-api-access-wd89b\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.877426 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.877497 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.883824 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.883857 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.890646 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:08 crc kubenswrapper[4689]: I1013 21:31:08.894464 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd89b\" (UniqueName: \"kubernetes.io/projected/44571d1c-f8f4-442a-ac47-51d05df37bfc-kube-api-access-wd89b\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:09 crc kubenswrapper[4689]: I1013 21:31:09.091831 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:09 crc kubenswrapper[4689]: I1013 21:31:09.635284 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q"] Oct 13 21:31:09 crc kubenswrapper[4689]: I1013 21:31:09.648963 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 21:31:10 crc kubenswrapper[4689]: I1013 21:31:10.296398 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" event={"ID":"44571d1c-f8f4-442a-ac47-51d05df37bfc","Type":"ContainerStarted","Data":"c430759dd71ce1d2dc0a3d4f670e636ea3cd34eebdc7b9ef847a163460f4af2c"} Oct 13 21:31:18 crc kubenswrapper[4689]: I1013 21:31:18.390786 4689 generic.go:334] "Generic (PLEG): container finished" podID="de9fccf5-fe48-498b-a6db-15e734aa9e61" containerID="9f7da4b6a0568fd0bcbf5d1e8c2a68b601a8c068c5b08b9d32dc733b3b6cae92" exitCode=0 Oct 13 21:31:18 crc kubenswrapper[4689]: I1013 21:31:18.390873 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"de9fccf5-fe48-498b-a6db-15e734aa9e61","Type":"ContainerDied","Data":"9f7da4b6a0568fd0bcbf5d1e8c2a68b601a8c068c5b08b9d32dc733b3b6cae92"} Oct 13 21:31:18 crc kubenswrapper[4689]: I1013 21:31:18.392690 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" event={"ID":"44571d1c-f8f4-442a-ac47-51d05df37bfc","Type":"ContainerStarted","Data":"216bb81881ced4db36d371749f116f91e10ac40b20bd16e126e6b914e944f200"} Oct 13 21:31:18 crc kubenswrapper[4689]: I1013 21:31:18.474530 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" podStartSLOduration=2.679975663 podStartE2EDuration="10.474506148s" podCreationTimestamp="2025-10-13 21:31:08 +0000 UTC" firstStartedPulling="2025-10-13 21:31:09.648755228 +0000 UTC m=+1186.567000313" lastFinishedPulling="2025-10-13 21:31:17.443285713 +0000 UTC m=+1194.361530798" observedRunningTime="2025-10-13 21:31:18.461001189 +0000 UTC m=+1195.379246284" watchObservedRunningTime="2025-10-13 21:31:18.474506148 +0000 UTC m=+1195.392751233" Oct 13 21:31:19 crc kubenswrapper[4689]: I1013 21:31:19.402366 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"de9fccf5-fe48-498b-a6db-15e734aa9e61","Type":"ContainerStarted","Data":"b656b77aa47cf8f395c4a0849eb48bf74148234267d3a235ec1040336fb4aa62"} Oct 13 21:31:19 crc kubenswrapper[4689]: I1013 21:31:19.402852 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 13 21:31:19 crc kubenswrapper[4689]: I1013 21:31:19.424539 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.424519384 podStartE2EDuration="36.424519384s" podCreationTimestamp="2025-10-13 21:30:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:31:19.419766892 +0000 UTC m=+1196.338011987" watchObservedRunningTime="2025-10-13 21:31:19.424519384 +0000 UTC m=+1196.342764469" Oct 13 21:31:20 crc kubenswrapper[4689]: I1013 21:31:20.417721 4689 generic.go:334] "Generic (PLEG): container finished" podID="4448f3de-e179-4a5c-8a6d-dd16b725bb0c" 
containerID="d515333b0e3274da17eff491a100dc5d5752b9f8e8d2eb26e3a3e5dc4641194c" exitCode=0 Oct 13 21:31:20 crc kubenswrapper[4689]: I1013 21:31:20.417800 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4448f3de-e179-4a5c-8a6d-dd16b725bb0c","Type":"ContainerDied","Data":"d515333b0e3274da17eff491a100dc5d5752b9f8e8d2eb26e3a3e5dc4641194c"} Oct 13 21:31:21 crc kubenswrapper[4689]: I1013 21:31:21.435538 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4448f3de-e179-4a5c-8a6d-dd16b725bb0c","Type":"ContainerStarted","Data":"077fd6563eee8a4a0630a94d8d070f5fcb29578020ed2d285ea87b8a43f7b346"} Oct 13 21:31:21 crc kubenswrapper[4689]: I1013 21:31:21.436088 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:31:21 crc kubenswrapper[4689]: I1013 21:31:21.471556 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.471537839 podStartE2EDuration="36.471537839s" podCreationTimestamp="2025-10-13 21:30:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 21:31:21.467028683 +0000 UTC m=+1198.385273858" watchObservedRunningTime="2025-10-13 21:31:21.471537839 +0000 UTC m=+1198.389782924" Oct 13 21:31:23 crc kubenswrapper[4689]: I1013 21:31:23.858633 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:31:23 crc kubenswrapper[4689]: I1013 21:31:23.859005 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:31:25 crc kubenswrapper[4689]: I1013 21:31:25.194160 4689 scope.go:117] "RemoveContainer" containerID="dfe59ffe3aa34df93c9965de5599ba0bbb72b0e619e63cd7355737a1e3eab3de" Oct 13 21:31:25 crc kubenswrapper[4689]: I1013 21:31:25.255462 4689 scope.go:117] "RemoveContainer" containerID="ba815326d04561957cc286379170479e6b8986e6b593f3a86d453d21efadc3b3" Oct 13 21:31:25 crc kubenswrapper[4689]: I1013 21:31:25.295846 4689 scope.go:117] "RemoveContainer" containerID="835d5edfb8ef46446742b465584238dc18dfab141e7bdc04f1abc54a06e6cacf" Oct 13 21:31:29 crc kubenswrapper[4689]: I1013 21:31:29.502795 4689 generic.go:334] "Generic (PLEG): container finished" podID="44571d1c-f8f4-442a-ac47-51d05df37bfc" containerID="216bb81881ced4db36d371749f116f91e10ac40b20bd16e126e6b914e944f200" exitCode=0 Oct 13 21:31:29 crc kubenswrapper[4689]: I1013 21:31:29.502897 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" event={"ID":"44571d1c-f8f4-442a-ac47-51d05df37bfc","Type":"ContainerDied","Data":"216bb81881ced4db36d371749f116f91e10ac40b20bd16e126e6b914e944f200"} Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.054652 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.127169 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-inventory\") pod \"44571d1c-f8f4-442a-ac47-51d05df37bfc\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.127299 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-ssh-key\") pod \"44571d1c-f8f4-442a-ac47-51d05df37bfc\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.127384 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-repo-setup-combined-ca-bundle\") pod \"44571d1c-f8f4-442a-ac47-51d05df37bfc\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.127420 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd89b\" (UniqueName: \"kubernetes.io/projected/44571d1c-f8f4-442a-ac47-51d05df37bfc-kube-api-access-wd89b\") pod \"44571d1c-f8f4-442a-ac47-51d05df37bfc\" (UID: \"44571d1c-f8f4-442a-ac47-51d05df37bfc\") " Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.137799 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "44571d1c-f8f4-442a-ac47-51d05df37bfc" (UID: "44571d1c-f8f4-442a-ac47-51d05df37bfc"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.137855 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44571d1c-f8f4-442a-ac47-51d05df37bfc-kube-api-access-wd89b" (OuterVolumeSpecName: "kube-api-access-wd89b") pod "44571d1c-f8f4-442a-ac47-51d05df37bfc" (UID: "44571d1c-f8f4-442a-ac47-51d05df37bfc"). InnerVolumeSpecName "kube-api-access-wd89b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.163508 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "44571d1c-f8f4-442a-ac47-51d05df37bfc" (UID: "44571d1c-f8f4-442a-ac47-51d05df37bfc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.169691 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-inventory" (OuterVolumeSpecName: "inventory") pod "44571d1c-f8f4-442a-ac47-51d05df37bfc" (UID: "44571d1c-f8f4-442a-ac47-51d05df37bfc"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.229965 4689 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.230003 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd89b\" (UniqueName: \"kubernetes.io/projected/44571d1c-f8f4-442a-ac47-51d05df37bfc-kube-api-access-wd89b\") on node \"crc\" DevicePath \"\"" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.230019 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.230031 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44571d1c-f8f4-442a-ac47-51d05df37bfc-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.523309 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" event={"ID":"44571d1c-f8f4-442a-ac47-51d05df37bfc","Type":"ContainerDied","Data":"c430759dd71ce1d2dc0a3d4f670e636ea3cd34eebdc7b9ef847a163460f4af2c"} Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.523368 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c430759dd71ce1d2dc0a3d4f670e636ea3cd34eebdc7b9ef847a163460f4af2c" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.523379 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.604705 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m"] Oct 13 21:31:31 crc kubenswrapper[4689]: E1013 21:31:31.605222 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44571d1c-f8f4-442a-ac47-51d05df37bfc" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.605251 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="44571d1c-f8f4-442a-ac47-51d05df37bfc" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.605515 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="44571d1c-f8f4-442a-ac47-51d05df37bfc" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.606345 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.612701 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.612701 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.612768 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.613214 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.622375 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m"] Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.749537 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpsgf\" (UniqueName: \"kubernetes.io/projected/e6f26597-49c3-41a5-8352-cef0d439fd5c-kube-api-access-tpsgf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.749848 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.749983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.852005 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.852139 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpsgf\" (UniqueName: \"kubernetes.io/projected/e6f26597-49c3-41a5-8352-cef0d439fd5c-kube-api-access-tpsgf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.852281 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.857018 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.864509 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.870140 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpsgf\" (UniqueName: \"kubernetes.io/projected/e6f26597-49c3-41a5-8352-cef0d439fd5c-kube-api-access-tpsgf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-65d9m\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:31 crc kubenswrapper[4689]: I1013 21:31:31.926535 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:32 crc kubenswrapper[4689]: I1013 21:31:32.493845 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m"] Oct 13 21:31:32 crc kubenswrapper[4689]: I1013 21:31:32.532732 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" event={"ID":"e6f26597-49c3-41a5-8352-cef0d439fd5c","Type":"ContainerStarted","Data":"a3f86468b6a4e7dc546d7e227040fcf530d93c34fe5815621029244650e58607"} Oct 13 21:31:33 crc kubenswrapper[4689]: I1013 21:31:33.540339 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" event={"ID":"e6f26597-49c3-41a5-8352-cef0d439fd5c","Type":"ContainerStarted","Data":"5fe211a8e58999287e8c8b8338525822acd5536b35c0776f5e0298d160243212"} Oct 13 21:31:33 crc kubenswrapper[4689]: I1013 21:31:33.565158 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" podStartSLOduration=1.946078404 podStartE2EDuration="2.565135414s" podCreationTimestamp="2025-10-13 21:31:31 +0000 UTC" firstStartedPulling="2025-10-13 21:31:32.507555507 +0000 UTC m=+1209.425800592" lastFinishedPulling="2025-10-13 21:31:33.126612517 +0000 UTC m=+1210.044857602" observedRunningTime="2025-10-13 21:31:33.556038259 +0000 UTC m=+1210.474283344" watchObservedRunningTime="2025-10-13 21:31:33.565135414 +0000 UTC m=+1210.483380499" Oct 13 21:31:33 crc kubenswrapper[4689]: I1013 21:31:33.715143 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 13 21:31:35 crc kubenswrapper[4689]: I1013 21:31:35.537833 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 13 21:31:36 crc kubenswrapper[4689]: I1013 21:31:36.568987 4689 generic.go:334] "Generic (PLEG): container finished" podID="e6f26597-49c3-41a5-8352-cef0d439fd5c" 
containerID="5fe211a8e58999287e8c8b8338525822acd5536b35c0776f5e0298d160243212" exitCode=0 Oct 13 21:31:36 crc kubenswrapper[4689]: I1013 21:31:36.569041 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" event={"ID":"e6f26597-49c3-41a5-8352-cef0d439fd5c","Type":"ContainerDied","Data":"5fe211a8e58999287e8c8b8338525822acd5536b35c0776f5e0298d160243212"} Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.017459 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.072469 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-inventory\") pod \"e6f26597-49c3-41a5-8352-cef0d439fd5c\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.072513 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-ssh-key\") pod \"e6f26597-49c3-41a5-8352-cef0d439fd5c\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.072687 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpsgf\" (UniqueName: \"kubernetes.io/projected/e6f26597-49c3-41a5-8352-cef0d439fd5c-kube-api-access-tpsgf\") pod \"e6f26597-49c3-41a5-8352-cef0d439fd5c\" (UID: \"e6f26597-49c3-41a5-8352-cef0d439fd5c\") " Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.091669 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6f26597-49c3-41a5-8352-cef0d439fd5c-kube-api-access-tpsgf" (OuterVolumeSpecName: "kube-api-access-tpsgf") pod "e6f26597-49c3-41a5-8352-cef0d439fd5c" (UID: "e6f26597-49c3-41a5-8352-cef0d439fd5c"). InnerVolumeSpecName "kube-api-access-tpsgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.153209 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-inventory" (OuterVolumeSpecName: "inventory") pod "e6f26597-49c3-41a5-8352-cef0d439fd5c" (UID: "e6f26597-49c3-41a5-8352-cef0d439fd5c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.186836 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.186876 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpsgf\" (UniqueName: \"kubernetes.io/projected/e6f26597-49c3-41a5-8352-cef0d439fd5c-kube-api-access-tpsgf\") on node \"crc\" DevicePath \"\"" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.197763 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e6f26597-49c3-41a5-8352-cef0d439fd5c" (UID: "e6f26597-49c3-41a5-8352-cef0d439fd5c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.288809 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f26597-49c3-41a5-8352-cef0d439fd5c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.589634 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" event={"ID":"e6f26597-49c3-41a5-8352-cef0d439fd5c","Type":"ContainerDied","Data":"a3f86468b6a4e7dc546d7e227040fcf530d93c34fe5815621029244650e58607"} Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.589688 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3f86468b6a4e7dc546d7e227040fcf530d93c34fe5815621029244650e58607" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.589932 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-65d9m" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.668721 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk"] Oct 13 21:31:38 crc kubenswrapper[4689]: E1013 21:31:38.669471 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f26597-49c3-41a5-8352-cef0d439fd5c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.669576 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f26597-49c3-41a5-8352-cef0d439fd5c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.669917 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f26597-49c3-41a5-8352-cef0d439fd5c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.671029 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.674214 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.674294 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.674433 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.675129 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.683021 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk"] Oct 13 21:31:38 crc kubenswrapper[4689]: E1013 21:31:38.742634 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6f26597_49c3_41a5_8352_cef0d439fd5c.slice/crio-a3f86468b6a4e7dc546d7e227040fcf530d93c34fe5815621029244650e58607\": RecentStats: unable to find data in memory cache]" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.798355 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.798464 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fvdq\" (UniqueName: \"kubernetes.io/projected/1355811c-482d-4b45-b7cb-7e16b64debf6-kube-api-access-2fvdq\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.798493 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.798559 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.900558 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.900695 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fvdq\" (UniqueName: \"kubernetes.io/projected/1355811c-482d-4b45-b7cb-7e16b64debf6-kube-api-access-2fvdq\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.900735 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.900787 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.904954 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.905311 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.905878 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.918363 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fvdq\" (UniqueName: \"kubernetes.io/projected/1355811c-482d-4b45-b7cb-7e16b64debf6-kube-api-access-2fvdq\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:38 crc kubenswrapper[4689]: I1013 21:31:38.996021 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:31:39 crc kubenswrapper[4689]: I1013 21:31:39.543429 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk"] Oct 13 21:31:39 crc kubenswrapper[4689]: I1013 21:31:39.607264 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" event={"ID":"1355811c-482d-4b45-b7cb-7e16b64debf6","Type":"ContainerStarted","Data":"3025fb41bffef18484fc89e80a1da7cad2eff9180bdff21aed76e48f061aff47"} Oct 13 21:31:40 crc kubenswrapper[4689]: I1013 21:31:40.619971 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" event={"ID":"1355811c-482d-4b45-b7cb-7e16b64debf6","Type":"ContainerStarted","Data":"bbbee9a9d92327214815f7668af9fcb86fc46bc3e9696c36e3bc4736eccdd5e5"} Oct 13 21:31:40 crc kubenswrapper[4689]: I1013 21:31:40.649381 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" podStartSLOduration=2.226524845 podStartE2EDuration="2.649352801s" podCreationTimestamp="2025-10-13 21:31:38 +0000 UTC" firstStartedPulling="2025-10-13 21:31:39.541940988 +0000 UTC m=+1216.460186113" lastFinishedPulling="2025-10-13 21:31:39.964768984 +0000 UTC m=+1216.883014069" observedRunningTime="2025-10-13 21:31:40.637706887 +0000 UTC m=+1217.555951982" watchObservedRunningTime="2025-10-13 21:31:40.649352801 +0000 UTC m=+1217.567597906" Oct 13 21:31:53 crc kubenswrapper[4689]: I1013 21:31:53.858782 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:31:53 crc kubenswrapper[4689]: I1013 21:31:53.860848 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:32:23 crc kubenswrapper[4689]: I1013 21:32:23.859064 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:32:23 crc kubenswrapper[4689]: I1013 21:32:23.859996 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:32:23 crc kubenswrapper[4689]: I1013 21:32:23.860062 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:32:23 crc kubenswrapper[4689]: I1013 21:32:23.861081 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:32:23 crc kubenswrapper[4689]: I1013 21:32:23.861160 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28" gracePeriod=600 Oct 13 21:32:24 crc kubenswrapper[4689]: I1013 21:32:24.039327 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28" exitCode=0 Oct 13 21:32:24 crc kubenswrapper[4689]: I1013 21:32:24.039384 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28"} Oct 13 21:32:24 crc kubenswrapper[4689]: I1013 21:32:24.039867 4689 scope.go:117] "RemoveContainer" containerID="ab2b61226a649c7705a70b5c1bf03941d31100bc06b01a9ba4b9500ce87dedb9" Oct 13 21:32:25 crc kubenswrapper[4689]: I1013 21:32:25.052782 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"c1c3f37ad95c44ab0daca2d4eb5b08efbef51dbd835c6dbf7a731418a7a15ce4"} Oct 13 21:32:30 crc kubenswrapper[4689]: E1013 21:32:30.007976 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:32:40 crc kubenswrapper[4689]: E1013 21:32:40.286041 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:32:50 crc kubenswrapper[4689]: E1013 21:32:50.523932 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:33:00 crc kubenswrapper[4689]: E1013 21:33:00.755089 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:33:10 crc kubenswrapper[4689]: E1013 21:33:10.974444 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:33:21 crc kubenswrapper[4689]: E1013 21:33:21.199759 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28.scope\": RecentStats: unable to find data in memory cache]" Oct 13 21:33:25 crc kubenswrapper[4689]: I1013 21:33:25.465920 4689 scope.go:117] "RemoveContainer" containerID="0521ae12e45071004a2a2c5f88eae109b038af98b43728296a634db0b47efd07" Oct 13 21:33:25 crc kubenswrapper[4689]: I1013 21:33:25.506906 4689 scope.go:117] "RemoveContainer" containerID="c2411c36b63d5bc86fd3c7da8260f1554e7750d582b6616ac3583bcf8a37d0e6" Oct 13 21:33:25 crc kubenswrapper[4689]: I1013 21:33:25.534616 4689 scope.go:117] "RemoveContainer" containerID="2c2665bedfc49bbf4f93f338291d7a3f29cd9dc88a3e007e215203e698df298b" Oct 13 21:33:25 crc kubenswrapper[4689]: I1013 21:33:25.572884 4689 scope.go:117] "RemoveContainer" containerID="56c9c5bb6ca28063d276ff3a301de8321837344a9e159354626434507914970b" Oct 13 21:33:25 crc kubenswrapper[4689]: I1013 21:33:25.600417 4689 scope.go:117] "RemoveContainer" containerID="10026754ade7952a531a6c0d31017c5426935f0445c97a451ea36b5a28923c9d" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.493273 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4b8l9"] Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.496133 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.508420 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4b8l9"] Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.576894 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-catalog-content\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.576977 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-utilities\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.577010 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf5pw\" (UniqueName: \"kubernetes.io/projected/ec9cb8a7-19ed-44b1-b43f-99976702ede7-kube-api-access-cf5pw\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.679301 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-catalog-content\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.679904 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-utilities\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.679950 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-catalog-content\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.680169 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf5pw\" (UniqueName: \"kubernetes.io/projected/ec9cb8a7-19ed-44b1-b43f-99976702ede7-kube-api-access-cf5pw\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.680372 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-utilities\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.703657 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-cf5pw\" (UniqueName: \"kubernetes.io/projected/ec9cb8a7-19ed-44b1-b43f-99976702ede7-kube-api-access-cf5pw\") pod \"redhat-operators-4b8l9\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:46 crc kubenswrapper[4689]: I1013 21:34:46.820149 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:47 crc kubenswrapper[4689]: I1013 21:34:47.270409 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4b8l9"] Oct 13 21:34:47 crc kubenswrapper[4689]: I1013 21:34:47.473019 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8l9" event={"ID":"ec9cb8a7-19ed-44b1-b43f-99976702ede7","Type":"ContainerStarted","Data":"ec2f0b701854b17717b0297a4f9dbe6fde6ae45e7014609212dd1ab4e71b9c5b"} Oct 13 21:34:48 crc kubenswrapper[4689]: I1013 21:34:48.482896 4689 generic.go:334] "Generic (PLEG): container finished" podID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerID="e60849149a0cf7a95c0a07942c78ce1f07377ec198017afdd2658a9c51cfa9d4" exitCode=0 Oct 13 21:34:48 crc kubenswrapper[4689]: I1013 21:34:48.482981 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8l9" event={"ID":"ec9cb8a7-19ed-44b1-b43f-99976702ede7","Type":"ContainerDied","Data":"e60849149a0cf7a95c0a07942c78ce1f07377ec198017afdd2658a9c51cfa9d4"} Oct 13 21:34:50 crc kubenswrapper[4689]: I1013 21:34:50.502960 4689 generic.go:334] "Generic (PLEG): container finished" podID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerID="45d341204f5f88eadcf15a69004c081bf72c7e703734276a763fde946b91e152" exitCode=0 Oct 13 21:34:50 crc kubenswrapper[4689]: I1013 21:34:50.503056 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8l9" event={"ID":"ec9cb8a7-19ed-44b1-b43f-99976702ede7","Type":"ContainerDied","Data":"45d341204f5f88eadcf15a69004c081bf72c7e703734276a763fde946b91e152"} Oct 13 21:34:51 crc kubenswrapper[4689]: I1013 21:34:51.514965 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8l9" event={"ID":"ec9cb8a7-19ed-44b1-b43f-99976702ede7","Type":"ContainerStarted","Data":"f193f2633129670ee44eb66400a3d27c6e3cc933f1467fde1c5d9bf268165793"} Oct 13 21:34:51 crc kubenswrapper[4689]: I1013 21:34:51.540478 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4b8l9" podStartSLOduration=3.128095286 podStartE2EDuration="5.540459559s" podCreationTimestamp="2025-10-13 21:34:46 +0000 UTC" firstStartedPulling="2025-10-13 21:34:48.484893996 +0000 UTC m=+1405.403139081" lastFinishedPulling="2025-10-13 21:34:50.897258269 +0000 UTC m=+1407.815503354" observedRunningTime="2025-10-13 21:34:51.532072571 +0000 UTC m=+1408.450317656" watchObservedRunningTime="2025-10-13 21:34:51.540459559 +0000 UTC m=+1408.458704644" Oct 13 21:34:53 crc kubenswrapper[4689]: I1013 21:34:53.859488 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:34:53 crc kubenswrapper[4689]: I1013 21:34:53.860387 4689 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:34:56 crc kubenswrapper[4689]: I1013 21:34:56.821155 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:56 crc kubenswrapper[4689]: I1013 21:34:56.821515 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:56 crc kubenswrapper[4689]: I1013 21:34:56.870633 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:57 crc kubenswrapper[4689]: I1013 21:34:57.608600 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:34:57 crc kubenswrapper[4689]: I1013 21:34:57.671373 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4b8l9"] Oct 13 21:34:59 crc kubenswrapper[4689]: I1013 21:34:59.591350 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4b8l9" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="registry-server" containerID="cri-o://f193f2633129670ee44eb66400a3d27c6e3cc933f1467fde1c5d9bf268165793" gracePeriod=2 Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.602480 4689 generic.go:334] "Generic (PLEG): container finished" podID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerID="f193f2633129670ee44eb66400a3d27c6e3cc933f1467fde1c5d9bf268165793" exitCode=0 Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.602858 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8l9" event={"ID":"ec9cb8a7-19ed-44b1-b43f-99976702ede7","Type":"ContainerDied","Data":"f193f2633129670ee44eb66400a3d27c6e3cc933f1467fde1c5d9bf268165793"} Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.602890 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8l9" event={"ID":"ec9cb8a7-19ed-44b1-b43f-99976702ede7","Type":"ContainerDied","Data":"ec2f0b701854b17717b0297a4f9dbe6fde6ae45e7014609212dd1ab4e71b9c5b"} Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.602906 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec2f0b701854b17717b0297a4f9dbe6fde6ae45e7014609212dd1ab4e71b9c5b" Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.614120 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.749333 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-utilities\") pod \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.749373 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf5pw\" (UniqueName: \"kubernetes.io/projected/ec9cb8a7-19ed-44b1-b43f-99976702ede7-kube-api-access-cf5pw\") pod \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.750176 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-utilities" (OuterVolumeSpecName: "utilities") pod "ec9cb8a7-19ed-44b1-b43f-99976702ede7" (UID: "ec9cb8a7-19ed-44b1-b43f-99976702ede7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.750479 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-catalog-content\") pod \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\" (UID: \"ec9cb8a7-19ed-44b1-b43f-99976702ede7\") " Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.751170 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.759265 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec9cb8a7-19ed-44b1-b43f-99976702ede7-kube-api-access-cf5pw" (OuterVolumeSpecName: "kube-api-access-cf5pw") pod "ec9cb8a7-19ed-44b1-b43f-99976702ede7" (UID: "ec9cb8a7-19ed-44b1-b43f-99976702ede7"). InnerVolumeSpecName "kube-api-access-cf5pw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.835183 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec9cb8a7-19ed-44b1-b43f-99976702ede7" (UID: "ec9cb8a7-19ed-44b1-b43f-99976702ede7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.852757 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf5pw\" (UniqueName: \"kubernetes.io/projected/ec9cb8a7-19ed-44b1-b43f-99976702ede7-kube-api-access-cf5pw\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:00 crc kubenswrapper[4689]: I1013 21:35:00.852790 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec9cb8a7-19ed-44b1-b43f-99976702ede7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:01 crc kubenswrapper[4689]: I1013 21:35:01.613497 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4b8l9" Oct 13 21:35:01 crc kubenswrapper[4689]: I1013 21:35:01.647276 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4b8l9"] Oct 13 21:35:01 crc kubenswrapper[4689]: I1013 21:35:01.656342 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4b8l9"] Oct 13 21:35:01 crc kubenswrapper[4689]: I1013 21:35:01.878179 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" path="/var/lib/kubelet/pods/ec9cb8a7-19ed-44b1-b43f-99976702ede7/volumes" Oct 13 21:35:15 crc kubenswrapper[4689]: I1013 21:35:15.753123 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" event={"ID":"1355811c-482d-4b45-b7cb-7e16b64debf6","Type":"ContainerDied","Data":"bbbee9a9d92327214815f7668af9fcb86fc46bc3e9696c36e3bc4736eccdd5e5"} Oct 13 21:35:15 crc kubenswrapper[4689]: I1013 21:35:15.753088 4689 generic.go:334] "Generic (PLEG): container finished" podID="1355811c-482d-4b45-b7cb-7e16b64debf6" containerID="bbbee9a9d92327214815f7668af9fcb86fc46bc3e9696c36e3bc4736eccdd5e5" exitCode=0 Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.168879 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.181830 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-bootstrap-combined-ca-bundle\") pod \"1355811c-482d-4b45-b7cb-7e16b64debf6\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.181899 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fvdq\" (UniqueName: \"kubernetes.io/projected/1355811c-482d-4b45-b7cb-7e16b64debf6-kube-api-access-2fvdq\") pod \"1355811c-482d-4b45-b7cb-7e16b64debf6\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.182157 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-inventory\") pod \"1355811c-482d-4b45-b7cb-7e16b64debf6\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.182272 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-ssh-key\") pod \"1355811c-482d-4b45-b7cb-7e16b64debf6\" (UID: \"1355811c-482d-4b45-b7cb-7e16b64debf6\") " Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.189328 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1355811c-482d-4b45-b7cb-7e16b64debf6-kube-api-access-2fvdq" (OuterVolumeSpecName: "kube-api-access-2fvdq") pod "1355811c-482d-4b45-b7cb-7e16b64debf6" (UID: "1355811c-482d-4b45-b7cb-7e16b64debf6"). InnerVolumeSpecName "kube-api-access-2fvdq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.193190 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "1355811c-482d-4b45-b7cb-7e16b64debf6" (UID: "1355811c-482d-4b45-b7cb-7e16b64debf6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.218445 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-inventory" (OuterVolumeSpecName: "inventory") pod "1355811c-482d-4b45-b7cb-7e16b64debf6" (UID: "1355811c-482d-4b45-b7cb-7e16b64debf6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.235655 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1355811c-482d-4b45-b7cb-7e16b64debf6" (UID: "1355811c-482d-4b45-b7cb-7e16b64debf6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.284260 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.284450 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.284542 4689 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1355811c-482d-4b45-b7cb-7e16b64debf6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.284632 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fvdq\" (UniqueName: \"kubernetes.io/projected/1355811c-482d-4b45-b7cb-7e16b64debf6-kube-api-access-2fvdq\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.776757 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" event={"ID":"1355811c-482d-4b45-b7cb-7e16b64debf6","Type":"ContainerDied","Data":"3025fb41bffef18484fc89e80a1da7cad2eff9180bdff21aed76e48f061aff47"} Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.776807 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3025fb41bffef18484fc89e80a1da7cad2eff9180bdff21aed76e48f061aff47" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.776821 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.877852 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk"] Oct 13 21:35:17 crc kubenswrapper[4689]: E1013 21:35:17.878255 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="extract-content" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.878284 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="extract-content" Oct 13 21:35:17 crc kubenswrapper[4689]: E1013 21:35:17.878305 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="registry-server" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.878319 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="registry-server" Oct 13 21:35:17 crc kubenswrapper[4689]: E1013 21:35:17.878334 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1355811c-482d-4b45-b7cb-7e16b64debf6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.878347 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1355811c-482d-4b45-b7cb-7e16b64debf6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 13 21:35:17 crc kubenswrapper[4689]: E1013 21:35:17.878386 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="extract-utilities" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.878399 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="extract-utilities" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.878660 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec9cb8a7-19ed-44b1-b43f-99976702ede7" containerName="registry-server" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.878689 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1355811c-482d-4b45-b7cb-7e16b64debf6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.879485 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.883952 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk"] Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.885673 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.885893 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.886045 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.887379 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.896170 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.896284 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.896377 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5gkc\" (UniqueName: \"kubernetes.io/projected/f9b26af4-3dba-452d-9d66-715747d10f18-kube-api-access-c5gkc\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.997953 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.998320 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:17 crc kubenswrapper[4689]: I1013 21:35:17.998440 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5gkc\" (UniqueName: \"kubernetes.io/projected/f9b26af4-3dba-452d-9d66-715747d10f18-kube-api-access-c5gkc\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:18 crc kubenswrapper[4689]: I1013 21:35:18.003993 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:18 crc kubenswrapper[4689]: I1013 21:35:18.008288 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:18 crc kubenswrapper[4689]: I1013 21:35:18.015950 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5gkc\" (UniqueName: \"kubernetes.io/projected/f9b26af4-3dba-452d-9d66-715747d10f18-kube-api-access-c5gkc\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-9dztk\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:18 crc kubenswrapper[4689]: I1013 21:35:18.208838 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:35:18 crc kubenswrapper[4689]: I1013 21:35:18.695151 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk"] Oct 13 21:35:18 crc kubenswrapper[4689]: I1013 21:35:18.788957 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" event={"ID":"f9b26af4-3dba-452d-9d66-715747d10f18","Type":"ContainerStarted","Data":"54142b7e4576170232c17064d89c35ad84b9ca6bf68ebe781a248e3f319a31dc"} Oct 13 21:35:20 crc kubenswrapper[4689]: I1013 21:35:20.810626 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" event={"ID":"f9b26af4-3dba-452d-9d66-715747d10f18","Type":"ContainerStarted","Data":"616aa48fb0326558fc450f2893fb2693c5504edaf91b5dfb020597930552d086"} Oct 13 21:35:20 crc kubenswrapper[4689]: I1013 21:35:20.831355 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" podStartSLOduration=3.098843304 podStartE2EDuration="3.831338853s" podCreationTimestamp="2025-10-13 21:35:17 +0000 UTC" firstStartedPulling="2025-10-13 21:35:18.702484167 +0000 UTC m=+1435.620729252" lastFinishedPulling="2025-10-13 21:35:19.434979716 +0000 UTC m=+1436.353224801" observedRunningTime="2025-10-13 21:35:20.82606937 +0000 UTC m=+1437.744314455" watchObservedRunningTime="2025-10-13 21:35:20.831338853 +0000 UTC m=+1437.749583938" Oct 13 21:35:23 crc kubenswrapper[4689]: I1013 21:35:23.860372 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" start-of-body= Oct 13 21:35:23 crc kubenswrapper[4689]: I1013 21:35:23.863260 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:35:25 crc kubenswrapper[4689]: I1013 21:35:25.744689 4689 scope.go:117] "RemoveContainer" containerID="b17dfa5b7a6d7615fb94ba281ccf0be06bdf1178e23d7b87523006e66c620b73" Oct 13 21:35:25 crc kubenswrapper[4689]: I1013 21:35:25.769928 4689 scope.go:117] "RemoveContainer" containerID="342be3253311bfd48d306c06adf2ce883aa8b9d82a07e52601825bfe9e933b74" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.334107 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fhppq"] Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.337363 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.373960 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-utilities\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.377786 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz5ns\" (UniqueName: \"kubernetes.io/projected/e85114ba-40da-42cc-a299-b9810790a9db-kube-api-access-vz5ns\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.379405 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-catalog-content\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.387348 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fhppq"] Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.481442 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-utilities\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.481516 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz5ns\" (UniqueName: \"kubernetes.io/projected/e85114ba-40da-42cc-a299-b9810790a9db-kube-api-access-vz5ns\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.481987 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-catalog-content\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.482182 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-utilities\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.482306 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-catalog-content\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.516475 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz5ns\" (UniqueName: \"kubernetes.io/projected/e85114ba-40da-42cc-a299-b9810790a9db-kube-api-access-vz5ns\") pod \"certified-operators-fhppq\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:43 crc kubenswrapper[4689]: I1013 21:35:43.669424 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:44 crc kubenswrapper[4689]: I1013 21:35:44.216697 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fhppq"] Oct 13 21:35:45 crc kubenswrapper[4689]: I1013 21:35:45.051033 4689 generic.go:334] "Generic (PLEG): container finished" podID="e85114ba-40da-42cc-a299-b9810790a9db" containerID="fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8" exitCode=0 Oct 13 21:35:45 crc kubenswrapper[4689]: I1013 21:35:45.051217 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhppq" event={"ID":"e85114ba-40da-42cc-a299-b9810790a9db","Type":"ContainerDied","Data":"fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8"} Oct 13 21:35:45 crc kubenswrapper[4689]: I1013 21:35:45.051579 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhppq" event={"ID":"e85114ba-40da-42cc-a299-b9810790a9db","Type":"ContainerStarted","Data":"d69c6a2817873aaf2e6b4e1dda0ec4eb20885107dcf74c9ecc7d1c56da612252"} Oct 13 21:35:47 crc kubenswrapper[4689]: I1013 21:35:47.070575 4689 generic.go:334] "Generic (PLEG): container finished" podID="e85114ba-40da-42cc-a299-b9810790a9db" containerID="494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f" exitCode=0 Oct 13 21:35:47 crc kubenswrapper[4689]: I1013 21:35:47.070702 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhppq" event={"ID":"e85114ba-40da-42cc-a299-b9810790a9db","Type":"ContainerDied","Data":"494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f"} Oct 13 21:35:49 crc kubenswrapper[4689]: I1013 21:35:49.090349 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhppq" event={"ID":"e85114ba-40da-42cc-a299-b9810790a9db","Type":"ContainerStarted","Data":"4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3"} 
Oct 13 21:35:49 crc kubenswrapper[4689]: I1013 21:35:49.116005 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fhppq" podStartSLOduration=3.278904993 podStartE2EDuration="6.115987146s" podCreationTimestamp="2025-10-13 21:35:43 +0000 UTC" firstStartedPulling="2025-10-13 21:35:45.053313517 +0000 UTC m=+1461.971558602" lastFinishedPulling="2025-10-13 21:35:47.89039567 +0000 UTC m=+1464.808640755" observedRunningTime="2025-10-13 21:35:49.107665779 +0000 UTC m=+1466.025910864" watchObservedRunningTime="2025-10-13 21:35:49.115987146 +0000 UTC m=+1466.034232231" Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.670168 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.672575 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.739158 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.859321 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.859403 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.859478 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.860601 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c1c3f37ad95c44ab0daca2d4eb5b08efbef51dbd835c6dbf7a731418a7a15ce4"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:35:53 crc kubenswrapper[4689]: I1013 21:35:53.860672 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://c1c3f37ad95c44ab0daca2d4eb5b08efbef51dbd835c6dbf7a731418a7a15ce4" gracePeriod=600 Oct 13 21:35:54 crc kubenswrapper[4689]: I1013 21:35:54.139570 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="c1c3f37ad95c44ab0daca2d4eb5b08efbef51dbd835c6dbf7a731418a7a15ce4" exitCode=0 Oct 13 21:35:54 crc kubenswrapper[4689]: I1013 21:35:54.140983 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" 
event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"c1c3f37ad95c44ab0daca2d4eb5b08efbef51dbd835c6dbf7a731418a7a15ce4"} Oct 13 21:35:54 crc kubenswrapper[4689]: I1013 21:35:54.141022 4689 scope.go:117] "RemoveContainer" containerID="3565cbc278ad59107c2468a46bd3717ab9ba48c9bc6ab05d8607dae8c6533e28" Oct 13 21:35:54 crc kubenswrapper[4689]: I1013 21:35:54.203291 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:54 crc kubenswrapper[4689]: I1013 21:35:54.266022 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fhppq"] Oct 13 21:35:55 crc kubenswrapper[4689]: I1013 21:35:55.159064 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"} Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.163860 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fhppq" podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="registry-server" containerID="cri-o://4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3" gracePeriod=2 Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.605211 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.657895 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-catalog-content\") pod \"e85114ba-40da-42cc-a299-b9810790a9db\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.658156 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz5ns\" (UniqueName: \"kubernetes.io/projected/e85114ba-40da-42cc-a299-b9810790a9db-kube-api-access-vz5ns\") pod \"e85114ba-40da-42cc-a299-b9810790a9db\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.658228 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-utilities\") pod \"e85114ba-40da-42cc-a299-b9810790a9db\" (UID: \"e85114ba-40da-42cc-a299-b9810790a9db\") " Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.658906 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-utilities" (OuterVolumeSpecName: "utilities") pod "e85114ba-40da-42cc-a299-b9810790a9db" (UID: "e85114ba-40da-42cc-a299-b9810790a9db"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.685477 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e85114ba-40da-42cc-a299-b9810790a9db-kube-api-access-vz5ns" (OuterVolumeSpecName: "kube-api-access-vz5ns") pod "e85114ba-40da-42cc-a299-b9810790a9db" (UID: "e85114ba-40da-42cc-a299-b9810790a9db"). InnerVolumeSpecName "kube-api-access-vz5ns". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.761420 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz5ns\" (UniqueName: \"kubernetes.io/projected/e85114ba-40da-42cc-a299-b9810790a9db-kube-api-access-vz5ns\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:56 crc kubenswrapper[4689]: I1013 21:35:56.761455 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.102767 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e85114ba-40da-42cc-a299-b9810790a9db" (UID: "e85114ba-40da-42cc-a299-b9810790a9db"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.168840 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e85114ba-40da-42cc-a299-b9810790a9db-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.175962 4689 generic.go:334] "Generic (PLEG): container finished" podID="e85114ba-40da-42cc-a299-b9810790a9db" containerID="4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3" exitCode=0 Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.176012 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhppq" event={"ID":"e85114ba-40da-42cc-a299-b9810790a9db","Type":"ContainerDied","Data":"4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3"} Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.176033 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fhppq" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.176054 4689 scope.go:117] "RemoveContainer" containerID="4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.176040 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhppq" event={"ID":"e85114ba-40da-42cc-a299-b9810790a9db","Type":"ContainerDied","Data":"d69c6a2817873aaf2e6b4e1dda0ec4eb20885107dcf74c9ecc7d1c56da612252"} Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.213106 4689 scope.go:117] "RemoveContainer" containerID="494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.213834 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fhppq"] Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.222113 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fhppq"] Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.242497 4689 scope.go:117] "RemoveContainer" containerID="fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.282173 4689 scope.go:117] "RemoveContainer" containerID="4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3" Oct 13 21:35:57 crc kubenswrapper[4689]: E1013 21:35:57.282625 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3\": container with ID starting with 4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3 not found: ID does not exist" containerID="4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.282683 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3"} err="failed to get container status \"4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3\": rpc error: code = NotFound desc = could not find container \"4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3\": container with ID starting with 4dfd6868d0293953bfc3cd739faa420965cc88314e5192e636ebe8f4d130bef3 not found: ID does not exist" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.282719 4689 scope.go:117] "RemoveContainer" containerID="494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f" Oct 13 21:35:57 crc kubenswrapper[4689]: E1013 21:35:57.283053 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f\": container with ID starting with 494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f not found: ID does not exist" containerID="494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.283100 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f"} err="failed to get container status \"494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f\": rpc error: code = NotFound desc = could not find 
container \"494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f\": container with ID starting with 494dffa6b5dbe27c1724737e95628c658d70134c4331d7117d6909bffab70e0f not found: ID does not exist" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.283135 4689 scope.go:117] "RemoveContainer" containerID="fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8" Oct 13 21:35:57 crc kubenswrapper[4689]: E1013 21:35:57.283603 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8\": container with ID starting with fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8 not found: ID does not exist" containerID="fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.283632 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8"} err="failed to get container status \"fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8\": rpc error: code = NotFound desc = could not find container \"fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8\": container with ID starting with fc4628f0fa63babd9488b5b92076af44a6a44beb8300234594d64f7819941ae8 not found: ID does not exist" Oct 13 21:35:57 crc kubenswrapper[4689]: I1013 21:35:57.881083 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e85114ba-40da-42cc-a299-b9810790a9db" path="/var/lib/kubelet/pods/e85114ba-40da-42cc-a299-b9810790a9db/volumes" Oct 13 21:36:03 crc kubenswrapper[4689]: I1013 21:36:03.041317 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-229q9"] Oct 13 21:36:03 crc kubenswrapper[4689]: I1013 21:36:03.056033 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-vg98r"] Oct 13 21:36:03 crc kubenswrapper[4689]: I1013 21:36:03.067311 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-vg98r"] Oct 13 21:36:03 crc kubenswrapper[4689]: I1013 21:36:03.077538 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-229q9"] Oct 13 21:36:03 crc kubenswrapper[4689]: I1013 21:36:03.881314 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eac6513-46b3-4591-bf02-7a5cd0200e96" path="/var/lib/kubelet/pods/9eac6513-46b3-4591-bf02-7a5cd0200e96/volumes" Oct 13 21:36:03 crc kubenswrapper[4689]: I1013 21:36:03.882027 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7f7455f-653a-4658-8864-e322e3b1fcb2" path="/var/lib/kubelet/pods/d7f7455f-653a-4658-8864-e322e3b1fcb2/volumes" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.493844 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vcm46"] Oct 13 21:36:06 crc kubenswrapper[4689]: E1013 21:36:06.494827 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="registry-server" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.494844 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="registry-server" Oct 13 21:36:06 crc kubenswrapper[4689]: E1013 21:36:06.494865 4689 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="extract-content" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.494872 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="extract-content" Oct 13 21:36:06 crc kubenswrapper[4689]: E1013 21:36:06.494904 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="extract-utilities" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.494912 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="extract-utilities" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.495088 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e85114ba-40da-42cc-a299-b9810790a9db" containerName="registry-server" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.496455 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.513466 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vcm46"] Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.647376 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-catalog-content\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.647445 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-utilities\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.647503 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99vcf\" (UniqueName: \"kubernetes.io/projected/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-kube-api-access-99vcf\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.749470 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-catalog-content\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.749544 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-utilities\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.749620 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99vcf\" (UniqueName: \"kubernetes.io/projected/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-kube-api-access-99vcf\") pod 
\"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.750051 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-catalog-content\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.750145 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-utilities\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.778824 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99vcf\" (UniqueName: \"kubernetes.io/projected/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-kube-api-access-99vcf\") pod \"community-operators-vcm46\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:06 crc kubenswrapper[4689]: I1013 21:36:06.819163 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:07 crc kubenswrapper[4689]: I1013 21:36:07.362385 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vcm46"] Oct 13 21:36:08 crc kubenswrapper[4689]: I1013 21:36:08.292224 4689 generic.go:334] "Generic (PLEG): container finished" podID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerID="807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0" exitCode=0 Oct 13 21:36:08 crc kubenswrapper[4689]: I1013 21:36:08.292333 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vcm46" event={"ID":"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92","Type":"ContainerDied","Data":"807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0"} Oct 13 21:36:08 crc kubenswrapper[4689]: I1013 21:36:08.292507 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vcm46" event={"ID":"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92","Type":"ContainerStarted","Data":"37a96b916e893a30b421a03b55571459368fbeb59f8f615ed607c6c6807aa089"} Oct 13 21:36:09 crc kubenswrapper[4689]: I1013 21:36:09.030741 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-fjttp"] Oct 13 21:36:09 crc kubenswrapper[4689]: I1013 21:36:09.040017 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-fjttp"] Oct 13 21:36:09 crc kubenswrapper[4689]: I1013 21:36:09.302555 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vcm46" event={"ID":"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92","Type":"ContainerStarted","Data":"452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830"} Oct 13 21:36:09 crc kubenswrapper[4689]: I1013 21:36:09.878823 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a4aaec5-dc5e-4500-9aab-16a83f4d60f6" path="/var/lib/kubelet/pods/7a4aaec5-dc5e-4500-9aab-16a83f4d60f6/volumes" Oct 13 21:36:10 crc kubenswrapper[4689]: I1013 21:36:10.314221 4689 generic.go:334] "Generic 
(PLEG): container finished" podID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerID="452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830" exitCode=0 Oct 13 21:36:10 crc kubenswrapper[4689]: I1013 21:36:10.314280 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vcm46" event={"ID":"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92","Type":"ContainerDied","Data":"452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830"} Oct 13 21:36:10 crc kubenswrapper[4689]: I1013 21:36:10.317422 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 21:36:11 crc kubenswrapper[4689]: I1013 21:36:11.325327 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vcm46" event={"ID":"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92","Type":"ContainerStarted","Data":"6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a"} Oct 13 21:36:11 crc kubenswrapper[4689]: I1013 21:36:11.342908 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vcm46" podStartSLOduration=2.900170307 podStartE2EDuration="5.342890267s" podCreationTimestamp="2025-10-13 21:36:06 +0000 UTC" firstStartedPulling="2025-10-13 21:36:08.294311 +0000 UTC m=+1485.212556105" lastFinishedPulling="2025-10-13 21:36:10.73703098 +0000 UTC m=+1487.655276065" observedRunningTime="2025-10-13 21:36:11.341578686 +0000 UTC m=+1488.259823771" watchObservedRunningTime="2025-10-13 21:36:11.342890267 +0000 UTC m=+1488.261135352" Oct 13 21:36:14 crc kubenswrapper[4689]: I1013 21:36:14.040507 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-004c-account-create-xpm4r"] Oct 13 21:36:14 crc kubenswrapper[4689]: I1013 21:36:14.051658 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-9cd0-account-create-kjc4q"] Oct 13 21:36:14 crc kubenswrapper[4689]: I1013 21:36:14.061990 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-004c-account-create-xpm4r"] Oct 13 21:36:14 crc kubenswrapper[4689]: I1013 21:36:14.069630 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-9cd0-account-create-kjc4q"] Oct 13 21:36:15 crc kubenswrapper[4689]: I1013 21:36:15.879361 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ad8ad88-3a13-433c-b063-3303125e8ee6" path="/var/lib/kubelet/pods/8ad8ad88-3a13-433c-b063-3303125e8ee6/volumes" Oct 13 21:36:15 crc kubenswrapper[4689]: I1013 21:36:15.880882 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b828e6e8-5980-4eb9-a91d-8a0c503c5432" path="/var/lib/kubelet/pods/b828e6e8-5980-4eb9-a91d-8a0c503c5432/volumes" Oct 13 21:36:16 crc kubenswrapper[4689]: I1013 21:36:16.820092 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:16 crc kubenswrapper[4689]: I1013 21:36:16.820156 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:16 crc kubenswrapper[4689]: I1013 21:36:16.890119 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:17 crc kubenswrapper[4689]: I1013 21:36:17.433548 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vcm46" Oct 13 
21:36:17 crc kubenswrapper[4689]: I1013 21:36:17.489963 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vcm46"] Oct 13 21:36:19 crc kubenswrapper[4689]: I1013 21:36:19.394955 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vcm46" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="registry-server" containerID="cri-o://6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a" gracePeriod=2 Oct 13 21:36:19 crc kubenswrapper[4689]: I1013 21:36:19.849202 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:19 crc kubenswrapper[4689]: I1013 21:36:19.994773 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99vcf\" (UniqueName: \"kubernetes.io/projected/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-kube-api-access-99vcf\") pod \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " Oct 13 21:36:19 crc kubenswrapper[4689]: I1013 21:36:19.994913 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-catalog-content\") pod \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " Oct 13 21:36:19 crc kubenswrapper[4689]: I1013 21:36:19.994994 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-utilities\") pod \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\" (UID: \"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92\") " Oct 13 21:36:19 crc kubenswrapper[4689]: I1013 21:36:19.996337 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-utilities" (OuterVolumeSpecName: "utilities") pod "9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" (UID: "9eaafb4f-a207-4cd9-a1ac-c6cea2449a92"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.008828 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-kube-api-access-99vcf" (OuterVolumeSpecName: "kube-api-access-99vcf") pod "9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" (UID: "9eaafb4f-a207-4cd9-a1ac-c6cea2449a92"). InnerVolumeSpecName "kube-api-access-99vcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.036181 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-b474-account-create-hq4kd"] Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.045805 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" (UID: "9eaafb4f-a207-4cd9-a1ac-c6cea2449a92"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.047880 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-b474-account-create-hq4kd"] Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.097724 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99vcf\" (UniqueName: \"kubernetes.io/projected/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-kube-api-access-99vcf\") on node \"crc\" DevicePath \"\"" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.097764 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.097776 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.405814 4689 generic.go:334] "Generic (PLEG): container finished" podID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerID="6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a" exitCode=0 Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.406019 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vcm46" event={"ID":"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92","Type":"ContainerDied","Data":"6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a"} Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.406116 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vcm46" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.406122 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vcm46" event={"ID":"9eaafb4f-a207-4cd9-a1ac-c6cea2449a92","Type":"ContainerDied","Data":"37a96b916e893a30b421a03b55571459368fbeb59f8f615ed607c6c6807aa089"} Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.406146 4689 scope.go:117] "RemoveContainer" containerID="6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.426997 4689 scope.go:117] "RemoveContainer" containerID="452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.444876 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vcm46"] Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.453488 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vcm46"] Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.467727 4689 scope.go:117] "RemoveContainer" containerID="807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.497170 4689 scope.go:117] "RemoveContainer" containerID="6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a" Oct 13 21:36:20 crc kubenswrapper[4689]: E1013 21:36:20.497628 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a\": container with ID starting with 6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a not 
found: ID does not exist" containerID="6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.497658 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a"} err="failed to get container status \"6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a\": rpc error: code = NotFound desc = could not find container \"6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a\": container with ID starting with 6b2537db098c2c49ba304697de20e8cb4601f5a9ac0806b7bbccf524abcd582a not found: ID does not exist" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.497683 4689 scope.go:117] "RemoveContainer" containerID="452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830" Oct 13 21:36:20 crc kubenswrapper[4689]: E1013 21:36:20.498012 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830\": container with ID starting with 452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830 not found: ID does not exist" containerID="452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.498031 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830"} err="failed to get container status \"452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830\": rpc error: code = NotFound desc = could not find container \"452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830\": container with ID starting with 452fc382fc399bff13c98fc626e89156ac2d5c49328112f0a4cf38f3912ad830 not found: ID does not exist" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.498044 4689 scope.go:117] "RemoveContainer" containerID="807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0" Oct 13 21:36:20 crc kubenswrapper[4689]: E1013 21:36:20.498243 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0\": container with ID starting with 807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0 not found: ID does not exist" containerID="807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0" Oct 13 21:36:20 crc kubenswrapper[4689]: I1013 21:36:20.498261 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0"} err="failed to get container status \"807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0\": rpc error: code = NotFound desc = could not find container \"807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0\": container with ID starting with 807e0498f54e381f145c2a5c53e3fd96634676062d7d7135da2ac0cd83077ee0 not found: ID does not exist" Oct 13 21:36:21 crc kubenswrapper[4689]: I1013 21:36:21.882224 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4659f4b9-dd90-4615-bff5-a75920d17db4" path="/var/lib/kubelet/pods/4659f4b9-dd90-4615-bff5-a75920d17db4/volumes" Oct 13 21:36:21 crc kubenswrapper[4689]: I1013 21:36:21.883076 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" path="/var/lib/kubelet/pods/9eaafb4f-a207-4cd9-a1ac-c6cea2449a92/volumes" Oct 13 21:36:25 crc kubenswrapper[4689]: I1013 21:36:25.851219 4689 scope.go:117] "RemoveContainer" containerID="06f380e8df47ef136a4c5c5352312515e5447013df5fbae4cdd3faaf70b28811" Oct 13 21:36:25 crc kubenswrapper[4689]: I1013 21:36:25.877062 4689 scope.go:117] "RemoveContainer" containerID="cfc09172221453f7f745ee5d9273c38cff8d9e942d4bef5b6725f2c6f221f24e" Oct 13 21:36:25 crc kubenswrapper[4689]: I1013 21:36:25.945445 4689 scope.go:117] "RemoveContainer" containerID="c8c7084b223ce09fcf9b8879ceb60f83c486cdd401f3b1852e792a95626ad85e" Oct 13 21:36:25 crc kubenswrapper[4689]: I1013 21:36:25.981253 4689 scope.go:117] "RemoveContainer" containerID="fdd59ac39d2c91bb1af62d5eb898feba32fd0e8c6835d58f0b7d7ecc38decd62" Oct 13 21:36:26 crc kubenswrapper[4689]: I1013 21:36:26.028186 4689 scope.go:117] "RemoveContainer" containerID="df2dfdff0e30fac2112a5f8fb8e1ecb215f91f7f6d7a11e4134d62cf6fb93b2e" Oct 13 21:36:26 crc kubenswrapper[4689]: I1013 21:36:26.072690 4689 scope.go:117] "RemoveContainer" containerID="cf16fed46dcda7a7f398f4bf5fb273fd64bf3193a6225923a439987c2be67084" Oct 13 21:36:44 crc kubenswrapper[4689]: I1013 21:36:44.080032 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-j7vd8"] Oct 13 21:36:44 crc kubenswrapper[4689]: I1013 21:36:44.092196 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-j7vd8"] Oct 13 21:36:44 crc kubenswrapper[4689]: I1013 21:36:44.104067 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-8djwn"] Oct 13 21:36:44 crc kubenswrapper[4689]: I1013 21:36:44.127634 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-nlsl4"] Oct 13 21:36:44 crc kubenswrapper[4689]: I1013 21:36:44.146497 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-8djwn"] Oct 13 21:36:44 crc kubenswrapper[4689]: I1013 21:36:44.157159 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-nlsl4"] Oct 13 21:36:45 crc kubenswrapper[4689]: I1013 21:36:45.882023 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="131d3017-57bd-4d6c-982c-7941d55822b9" path="/var/lib/kubelet/pods/131d3017-57bd-4d6c-982c-7941d55822b9/volumes" Oct 13 21:36:45 crc kubenswrapper[4689]: I1013 21:36:45.883219 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bfcb233-910d-4200-a46c-f6d350f0e252" path="/var/lib/kubelet/pods/5bfcb233-910d-4200-a46c-f6d350f0e252/volumes" Oct 13 21:36:45 crc kubenswrapper[4689]: I1013 21:36:45.884160 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eca580fb-8c72-4c1d-a917-e1e6a614e3cb" path="/var/lib/kubelet/pods/eca580fb-8c72-4c1d-a917-e1e6a614e3cb/volumes" Oct 13 21:36:48 crc kubenswrapper[4689]: I1013 21:36:48.049683 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-2vmxt"] Oct 13 21:36:48 crc kubenswrapper[4689]: I1013 21:36:48.061364 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-2vmxt"] Oct 13 21:36:49 crc kubenswrapper[4689]: I1013 21:36:49.040722 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-hm6gs"] Oct 13 21:36:49 crc kubenswrapper[4689]: I1013 21:36:49.053181 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-hm6gs"] Oct 13 21:36:49 crc 
kubenswrapper[4689]: I1013 21:36:49.880887 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca29831b-9c1d-4c38-bf2a-c17e40542a0c" path="/var/lib/kubelet/pods/ca29831b-9c1d-4c38-bf2a-c17e40542a0c/volumes" Oct 13 21:36:49 crc kubenswrapper[4689]: I1013 21:36:49.881440 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea77789b-b65d-4659-9169-ef628cda1bc1" path="/var/lib/kubelet/pods/ea77789b-b65d-4659-9169-ef628cda1bc1/volumes" Oct 13 21:37:11 crc kubenswrapper[4689]: I1013 21:37:11.894973 4689 generic.go:334] "Generic (PLEG): container finished" podID="f9b26af4-3dba-452d-9d66-715747d10f18" containerID="616aa48fb0326558fc450f2893fb2693c5504edaf91b5dfb020597930552d086" exitCode=0 Oct 13 21:37:11 crc kubenswrapper[4689]: I1013 21:37:11.895043 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" event={"ID":"f9b26af4-3dba-452d-9d66-715747d10f18","Type":"ContainerDied","Data":"616aa48fb0326558fc450f2893fb2693c5504edaf91b5dfb020597930552d086"} Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.043341 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-2740-account-create-cjdts"] Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.053933 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8dc8-account-create-h9cqt"] Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.062487 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-93c8-account-create-qzmql"] Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.070031 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-2740-account-create-cjdts"] Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.077119 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-93c8-account-create-qzmql"] Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.085900 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8dc8-account-create-h9cqt"] Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.401579 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.526275 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5gkc\" (UniqueName: \"kubernetes.io/projected/f9b26af4-3dba-452d-9d66-715747d10f18-kube-api-access-c5gkc\") pod \"f9b26af4-3dba-452d-9d66-715747d10f18\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.526506 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-ssh-key\") pod \"f9b26af4-3dba-452d-9d66-715747d10f18\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.526578 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-inventory\") pod \"f9b26af4-3dba-452d-9d66-715747d10f18\" (UID: \"f9b26af4-3dba-452d-9d66-715747d10f18\") " Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.531891 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9b26af4-3dba-452d-9d66-715747d10f18-kube-api-access-c5gkc" (OuterVolumeSpecName: "kube-api-access-c5gkc") pod "f9b26af4-3dba-452d-9d66-715747d10f18" (UID: "f9b26af4-3dba-452d-9d66-715747d10f18"). InnerVolumeSpecName "kube-api-access-c5gkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.556807 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-inventory" (OuterVolumeSpecName: "inventory") pod "f9b26af4-3dba-452d-9d66-715747d10f18" (UID: "f9b26af4-3dba-452d-9d66-715747d10f18"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.557220 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9b26af4-3dba-452d-9d66-715747d10f18" (UID: "f9b26af4-3dba-452d-9d66-715747d10f18"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.629805 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.629949 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b26af4-3dba-452d-9d66-715747d10f18-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.630047 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5gkc\" (UniqueName: \"kubernetes.io/projected/f9b26af4-3dba-452d-9d66-715747d10f18-kube-api-access-c5gkc\") on node \"crc\" DevicePath \"\"" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.886814 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06e1fd87-93da-41a7-8e6d-6477bcaa7bec" path="/var/lib/kubelet/pods/06e1fd87-93da-41a7-8e6d-6477bcaa7bec/volumes" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.888432 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e7989d-06bf-4909-848c-92a353d7be6f" path="/var/lib/kubelet/pods/25e7989d-06bf-4909-848c-92a353d7be6f/volumes" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.889537 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e0d19eb-6990-416e-b3cc-3f696c23146a" path="/var/lib/kubelet/pods/8e0d19eb-6990-416e-b3cc-3f696c23146a/volumes" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.916808 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" event={"ID":"f9b26af4-3dba-452d-9d66-715747d10f18","Type":"ContainerDied","Data":"54142b7e4576170232c17064d89c35ad84b9ca6bf68ebe781a248e3f319a31dc"} Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.916844 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54142b7e4576170232c17064d89c35ad84b9ca6bf68ebe781a248e3f319a31dc" Oct 13 21:37:13 crc kubenswrapper[4689]: I1013 21:37:13.916903 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-9dztk" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.008914 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg"] Oct 13 21:37:14 crc kubenswrapper[4689]: E1013 21:37:14.035109 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="registry-server" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.035156 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="registry-server" Oct 13 21:37:14 crc kubenswrapper[4689]: E1013 21:37:14.035197 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="extract-content" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.035209 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="extract-content" Oct 13 21:37:14 crc kubenswrapper[4689]: E1013 21:37:14.035248 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b26af4-3dba-452d-9d66-715747d10f18" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.035263 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b26af4-3dba-452d-9d66-715747d10f18" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 13 21:37:14 crc kubenswrapper[4689]: E1013 21:37:14.035279 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="extract-utilities" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.035290 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="extract-utilities" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.035640 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eaafb4f-a207-4cd9-a1ac-c6cea2449a92" containerName="registry-server" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.035681 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9b26af4-3dba-452d-9d66-715747d10f18" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.036413 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.037384 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg"] Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.038496 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.038824 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.038831 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.038851 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.142502 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.142855 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.142983 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5vb2\" (UniqueName: \"kubernetes.io/projected/a8d4f189-4446-410c-8cfd-b1cf669221db-kube-api-access-p5vb2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.244993 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.245068 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5vb2\" (UniqueName: \"kubernetes.io/projected/a8d4f189-4446-410c-8cfd-b1cf669221db-kube-api-access-p5vb2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.245126 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.250133 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.262896 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.265080 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5vb2\" (UniqueName: \"kubernetes.io/projected/a8d4f189-4446-410c-8cfd-b1cf669221db-kube-api-access-p5vb2\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-cltfg\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.362995 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.883873 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg"] Oct 13 21:37:14 crc kubenswrapper[4689]: I1013 21:37:14.924572 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" event={"ID":"a8d4f189-4446-410c-8cfd-b1cf669221db","Type":"ContainerStarted","Data":"8782219882b1c1c2e13ee64434f6725a4922a41bc85d9d6e3943419829c75865"} Oct 13 21:37:15 crc kubenswrapper[4689]: I1013 21:37:15.042870 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-w2fqn"] Oct 13 21:37:15 crc kubenswrapper[4689]: I1013 21:37:15.049655 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-w2fqn"] Oct 13 21:37:15 crc kubenswrapper[4689]: I1013 21:37:15.880669 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73cfa0da-2c68-445e-9d41-c2cee9857776" path="/var/lib/kubelet/pods/73cfa0da-2c68-445e-9d41-c2cee9857776/volumes" Oct 13 21:37:15 crc kubenswrapper[4689]: I1013 21:37:15.937383 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" event={"ID":"a8d4f189-4446-410c-8cfd-b1cf669221db","Type":"ContainerStarted","Data":"d3ac6765a4c05cb093ed0fe276138d39812bb3ed32c7a0b1685996c7554f4a47"} Oct 13 21:37:15 crc kubenswrapper[4689]: I1013 21:37:15.958200 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" podStartSLOduration=2.458252391 podStartE2EDuration="2.958174834s" podCreationTimestamp="2025-10-13 21:37:13 +0000 UTC" firstStartedPulling="2025-10-13 
21:37:14.89132185 +0000 UTC m=+1551.809566935" lastFinishedPulling="2025-10-13 21:37:15.391244263 +0000 UTC m=+1552.309489378" observedRunningTime="2025-10-13 21:37:15.954119248 +0000 UTC m=+1552.872364343" watchObservedRunningTime="2025-10-13 21:37:15.958174834 +0000 UTC m=+1552.876419929" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.261372 4689 scope.go:117] "RemoveContainer" containerID="59182b453705fbe6f4eda75e4f5bdc05616128de25d100706b8a4a962f9a6036" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.309107 4689 scope.go:117] "RemoveContainer" containerID="74c4c5a1c1dbb02b03a9c7932c3d6a7e0c8cd65ebfebdbb1887ceff667a56093" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.339084 4689 scope.go:117] "RemoveContainer" containerID="78b79c7aecd557d59c367100d13aac29d5d79e76d94c6ee6e53dbc588e134cc9" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.384353 4689 scope.go:117] "RemoveContainer" containerID="5337cca8d97f7f25ac226b6c3a8b8c37c48a87f1b8755bff59f182b952ff5f20" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.424124 4689 scope.go:117] "RemoveContainer" containerID="04021a95b96427d380934b4cdfcaf8997e9eb782d26f3fd374e21abfba6fdc49" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.468777 4689 scope.go:117] "RemoveContainer" containerID="74bef32fe2ec32d7dd58994f00cf963830c4677bd17ac6e9c28409bc30fc1d54" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.510614 4689 scope.go:117] "RemoveContainer" containerID="195163e77bd1d175263ea3301b4458341e38edf1533b768e26f9ffa0e1f05a5d" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.533225 4689 scope.go:117] "RemoveContainer" containerID="71c8ac33010c29d283dec138712b33674ff75ae4aa042d2ed76733062146f7f9" Oct 13 21:37:26 crc kubenswrapper[4689]: I1013 21:37:26.563944 4689 scope.go:117] "RemoveContainer" containerID="9e23783a9bd750e056498c4c377674ea9ba8f78c712ebe2d94862af2489fb4b6" Oct 13 21:37:30 crc kubenswrapper[4689]: I1013 21:37:30.042691 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-4cb78"] Oct 13 21:37:30 crc kubenswrapper[4689]: I1013 21:37:30.060508 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-4cb78"] Oct 13 21:37:31 crc kubenswrapper[4689]: I1013 21:37:31.880710 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a541987-5ad6-4f2f-b625-5b8105b669e5" path="/var/lib/kubelet/pods/7a541987-5ad6-4f2f-b625-5b8105b669e5/volumes" Oct 13 21:37:44 crc kubenswrapper[4689]: I1013 21:37:44.054753 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-d68g8"] Oct 13 21:37:44 crc kubenswrapper[4689]: I1013 21:37:44.063017 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-d68g8"] Oct 13 21:37:45 crc kubenswrapper[4689]: I1013 21:37:45.876650 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="632487e3-a6f0-4e53-bb4e-33454874ddca" path="/var/lib/kubelet/pods/632487e3-a6f0-4e53-bb4e-33454874ddca/volumes" Oct 13 21:37:52 crc kubenswrapper[4689]: I1013 21:37:52.027754 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-8lmgj"] Oct 13 21:37:52 crc kubenswrapper[4689]: I1013 21:37:52.035420 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-8lmgj"] Oct 13 21:37:53 crc kubenswrapper[4689]: I1013 21:37:53.882999 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33ebfe99-ee3b-49f0-9ffe-1bad91c2f579" 
path="/var/lib/kubelet/pods/33ebfe99-ee3b-49f0-9ffe-1bad91c2f579/volumes" Oct 13 21:37:58 crc kubenswrapper[4689]: I1013 21:37:58.028489 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-xsdlt"] Oct 13 21:37:58 crc kubenswrapper[4689]: I1013 21:37:58.036796 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-xsdlt"] Oct 13 21:37:59 crc kubenswrapper[4689]: I1013 21:37:59.878239 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c5019e8-86d0-4324-bffc-70583983b377" path="/var/lib/kubelet/pods/2c5019e8-86d0-4324-bffc-70583983b377/volumes" Oct 13 21:38:18 crc kubenswrapper[4689]: I1013 21:38:18.045369 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-kwxsd"] Oct 13 21:38:18 crc kubenswrapper[4689]: I1013 21:38:18.052690 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-cv8cg"] Oct 13 21:38:18 crc kubenswrapper[4689]: I1013 21:38:18.062698 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-tnlv4"] Oct 13 21:38:18 crc kubenswrapper[4689]: I1013 21:38:18.070276 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-kwxsd"] Oct 13 21:38:18 crc kubenswrapper[4689]: I1013 21:38:18.076713 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-cv8cg"] Oct 13 21:38:18 crc kubenswrapper[4689]: I1013 21:38:18.083184 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-tnlv4"] Oct 13 21:38:19 crc kubenswrapper[4689]: I1013 21:38:19.878668 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="132dcc8a-a3c1-4c02-9cd8-be28bf0e006b" path="/var/lib/kubelet/pods/132dcc8a-a3c1-4c02-9cd8-be28bf0e006b/volumes" Oct 13 21:38:19 crc kubenswrapper[4689]: I1013 21:38:19.879475 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc382ef2-90d0-4dd4-89cb-23d5e4dd9327" path="/var/lib/kubelet/pods/cc382ef2-90d0-4dd4-89cb-23d5e4dd9327/volumes" Oct 13 21:38:19 crc kubenswrapper[4689]: I1013 21:38:19.879982 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e882c36f-61b5-436d-ba2e-94f12bbb5010" path="/var/lib/kubelet/pods/e882c36f-61b5-436d-ba2e-94f12bbb5010/volumes" Oct 13 21:38:23 crc kubenswrapper[4689]: I1013 21:38:23.859170 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:38:23 crc kubenswrapper[4689]: I1013 21:38:23.859734 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:38:26 crc kubenswrapper[4689]: I1013 21:38:26.723313 4689 scope.go:117] "RemoveContainer" containerID="522ac4a1ac29734f14e61989d38b83d6927cac1cba68ee5921cc1c2e4bd4d59f" Oct 13 21:38:26 crc kubenswrapper[4689]: I1013 21:38:26.763911 4689 scope.go:117] "RemoveContainer" containerID="f6efecc3b0c1dffd9253a9bf10c4856e50d1a86e84e418e6d1bc3aa2a25690cb" Oct 13 21:38:26 crc kubenswrapper[4689]: I1013 21:38:26.817824 4689 scope.go:117] 
"RemoveContainer" containerID="7e8165bd07c21504d4ad95368f4e7c5d1ed2d2462e6bae8587b19d39ec43ce7d" Oct 13 21:38:26 crc kubenswrapper[4689]: I1013 21:38:26.847832 4689 scope.go:117] "RemoveContainer" containerID="2beb6bf6d4d5601040839fdeaa62b18849de0f930f1dca4c2b8a47ccdf67c4b0" Oct 13 21:38:26 crc kubenswrapper[4689]: I1013 21:38:26.895995 4689 scope.go:117] "RemoveContainer" containerID="f793658662f1d70ea731e85d48cd1c012dab43f36fbdf77b6d23d5bc801a94a7" Oct 13 21:38:26 crc kubenswrapper[4689]: I1013 21:38:26.945545 4689 scope.go:117] "RemoveContainer" containerID="ebf8c306b50e29cbcf0346c0e80129c61860578949d004d3973700d20dd7a837" Oct 13 21:38:26 crc kubenswrapper[4689]: I1013 21:38:26.982272 4689 scope.go:117] "RemoveContainer" containerID="4d1b81dc38686bdd5314eb45f9b87aacca3c98b674c78e1c0417860ace12fe48" Oct 13 21:38:29 crc kubenswrapper[4689]: I1013 21:38:29.665905 4689 generic.go:334] "Generic (PLEG): container finished" podID="a8d4f189-4446-410c-8cfd-b1cf669221db" containerID="d3ac6765a4c05cb093ed0fe276138d39812bb3ed32c7a0b1685996c7554f4a47" exitCode=0 Oct 13 21:38:29 crc kubenswrapper[4689]: I1013 21:38:29.665984 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" event={"ID":"a8d4f189-4446-410c-8cfd-b1cf669221db","Type":"ContainerDied","Data":"d3ac6765a4c05cb093ed0fe276138d39812bb3ed32c7a0b1685996c7554f4a47"} Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.087956 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.233867 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-ssh-key\") pod \"a8d4f189-4446-410c-8cfd-b1cf669221db\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.234035 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5vb2\" (UniqueName: \"kubernetes.io/projected/a8d4f189-4446-410c-8cfd-b1cf669221db-kube-api-access-p5vb2\") pod \"a8d4f189-4446-410c-8cfd-b1cf669221db\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.234074 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-inventory\") pod \"a8d4f189-4446-410c-8cfd-b1cf669221db\" (UID: \"a8d4f189-4446-410c-8cfd-b1cf669221db\") " Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.240810 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8d4f189-4446-410c-8cfd-b1cf669221db-kube-api-access-p5vb2" (OuterVolumeSpecName: "kube-api-access-p5vb2") pod "a8d4f189-4446-410c-8cfd-b1cf669221db" (UID: "a8d4f189-4446-410c-8cfd-b1cf669221db"). InnerVolumeSpecName "kube-api-access-p5vb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.263504 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a8d4f189-4446-410c-8cfd-b1cf669221db" (UID: "a8d4f189-4446-410c-8cfd-b1cf669221db"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.264691 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-inventory" (OuterVolumeSpecName: "inventory") pod "a8d4f189-4446-410c-8cfd-b1cf669221db" (UID: "a8d4f189-4446-410c-8cfd-b1cf669221db"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.336742 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.337104 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5vb2\" (UniqueName: \"kubernetes.io/projected/a8d4f189-4446-410c-8cfd-b1cf669221db-kube-api-access-p5vb2\") on node \"crc\" DevicePath \"\"" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.337114 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8d4f189-4446-410c-8cfd-b1cf669221db-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.687526 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" event={"ID":"a8d4f189-4446-410c-8cfd-b1cf669221db","Type":"ContainerDied","Data":"8782219882b1c1c2e13ee64434f6725a4922a41bc85d9d6e3943419829c75865"} Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.687818 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8782219882b1c1c2e13ee64434f6725a4922a41bc85d9d6e3943419829c75865" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.687878 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-cltfg" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.785725 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n"] Oct 13 21:38:31 crc kubenswrapper[4689]: E1013 21:38:31.786318 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8d4f189-4446-410c-8cfd-b1cf669221db" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.786354 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8d4f189-4446-410c-8cfd-b1cf669221db" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.786744 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8d4f189-4446-410c-8cfd-b1cf669221db" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.787679 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.789651 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.789842 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.789977 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.790387 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.812984 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n"] Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.948362 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.948650 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ghlr\" (UniqueName: \"kubernetes.io/projected/aed5fbf9-103b-48fb-b982-61a445ff7f09-kube-api-access-7ghlr\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:31 crc kubenswrapper[4689]: I1013 21:38:31.948802 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.050445 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.050561 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.050812 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ghlr\" (UniqueName: \"kubernetes.io/projected/aed5fbf9-103b-48fb-b982-61a445ff7f09-kube-api-access-7ghlr\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.055326 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.055419 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.072623 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ghlr\" (UniqueName: \"kubernetes.io/projected/aed5fbf9-103b-48fb-b982-61a445ff7f09-kube-api-access-7ghlr\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.109802 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.650347 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n"] Oct 13 21:38:32 crc kubenswrapper[4689]: I1013 21:38:32.698222 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" event={"ID":"aed5fbf9-103b-48fb-b982-61a445ff7f09","Type":"ContainerStarted","Data":"c06fd5f130039ad3778e07806bb2baff463e57263efa067a10e70a0d9e2dd6a5"} Oct 13 21:38:33 crc kubenswrapper[4689]: I1013 21:38:33.714188 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" event={"ID":"aed5fbf9-103b-48fb-b982-61a445ff7f09","Type":"ContainerStarted","Data":"c2db29544a2d31a40ea5ceb299e62920e7053734fc0ff16a0bed23813fc72668"} Oct 13 21:38:33 crc kubenswrapper[4689]: I1013 21:38:33.741226 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" podStartSLOduration=2.253607661 podStartE2EDuration="2.741208872s" podCreationTimestamp="2025-10-13 21:38:31 +0000 UTC" firstStartedPulling="2025-10-13 21:38:32.65631561 +0000 UTC m=+1629.574560715" lastFinishedPulling="2025-10-13 21:38:33.143916841 +0000 UTC m=+1630.062161926" observedRunningTime="2025-10-13 21:38:33.737359012 +0000 UTC m=+1630.655604097" watchObservedRunningTime="2025-10-13 21:38:33.741208872 +0000 UTC m=+1630.659453957" Oct 13 21:38:36 crc kubenswrapper[4689]: I1013 21:38:36.060116 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-a47a-account-create-s6fdj"] Oct 13 21:38:36 crc kubenswrapper[4689]: I1013 21:38:36.075992 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell1-5173-account-create-5rmz2"] Oct 13 21:38:36 crc kubenswrapper[4689]: I1013 21:38:36.105675 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-a47a-account-create-s6fdj"] Oct 13 21:38:36 crc kubenswrapper[4689]: I1013 21:38:36.131516 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5f42-account-create-jvccr"] Oct 13 21:38:36 crc kubenswrapper[4689]: I1013 21:38:36.148461 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5f42-account-create-jvccr"] Oct 13 21:38:36 crc kubenswrapper[4689]: I1013 21:38:36.182303 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-5173-account-create-5rmz2"] Oct 13 21:38:37 crc kubenswrapper[4689]: I1013 21:38:37.894000 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d841e40-0d97-4ec7-a0fa-ad896fda51bf" path="/var/lib/kubelet/pods/0d841e40-0d97-4ec7-a0fa-ad896fda51bf/volumes" Oct 13 21:38:37 crc kubenswrapper[4689]: I1013 21:38:37.895558 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68719362-13c4-4fca-8437-e68b33609d60" path="/var/lib/kubelet/pods/68719362-13c4-4fca-8437-e68b33609d60/volumes" Oct 13 21:38:37 crc kubenswrapper[4689]: I1013 21:38:37.896638 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7680d8a1-9a0d-443b-8ac4-08a8e60cccc5" path="/var/lib/kubelet/pods/7680d8a1-9a0d-443b-8ac4-08a8e60cccc5/volumes" Oct 13 21:38:38 crc kubenswrapper[4689]: I1013 21:38:38.757074 4689 generic.go:334] "Generic (PLEG): container finished" podID="aed5fbf9-103b-48fb-b982-61a445ff7f09" containerID="c2db29544a2d31a40ea5ceb299e62920e7053734fc0ff16a0bed23813fc72668" exitCode=0 Oct 13 21:38:38 crc kubenswrapper[4689]: I1013 21:38:38.757161 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" event={"ID":"aed5fbf9-103b-48fb-b982-61a445ff7f09","Type":"ContainerDied","Data":"c2db29544a2d31a40ea5ceb299e62920e7053734fc0ff16a0bed23813fc72668"} Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.204214 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.309226 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-ssh-key\") pod \"aed5fbf9-103b-48fb-b982-61a445ff7f09\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.309302 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ghlr\" (UniqueName: \"kubernetes.io/projected/aed5fbf9-103b-48fb-b982-61a445ff7f09-kube-api-access-7ghlr\") pod \"aed5fbf9-103b-48fb-b982-61a445ff7f09\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.309482 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-inventory\") pod \"aed5fbf9-103b-48fb-b982-61a445ff7f09\" (UID: \"aed5fbf9-103b-48fb-b982-61a445ff7f09\") " Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.315268 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed5fbf9-103b-48fb-b982-61a445ff7f09-kube-api-access-7ghlr" (OuterVolumeSpecName: "kube-api-access-7ghlr") pod "aed5fbf9-103b-48fb-b982-61a445ff7f09" (UID: "aed5fbf9-103b-48fb-b982-61a445ff7f09"). InnerVolumeSpecName "kube-api-access-7ghlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.337655 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aed5fbf9-103b-48fb-b982-61a445ff7f09" (UID: "aed5fbf9-103b-48fb-b982-61a445ff7f09"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.340303 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-inventory" (OuterVolumeSpecName: "inventory") pod "aed5fbf9-103b-48fb-b982-61a445ff7f09" (UID: "aed5fbf9-103b-48fb-b982-61a445ff7f09"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.413205 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.413247 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aed5fbf9-103b-48fb-b982-61a445ff7f09-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.413260 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ghlr\" (UniqueName: \"kubernetes.io/projected/aed5fbf9-103b-48fb-b982-61a445ff7f09-kube-api-access-7ghlr\") on node \"crc\" DevicePath \"\"" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.782729 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" event={"ID":"aed5fbf9-103b-48fb-b982-61a445ff7f09","Type":"ContainerDied","Data":"c06fd5f130039ad3778e07806bb2baff463e57263efa067a10e70a0d9e2dd6a5"} Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.783085 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c06fd5f130039ad3778e07806bb2baff463e57263efa067a10e70a0d9e2dd6a5" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.782892 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.860463 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h"] Oct 13 21:38:40 crc kubenswrapper[4689]: E1013 21:38:40.860868 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed5fbf9-103b-48fb-b982-61a445ff7f09" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.860888 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed5fbf9-103b-48fb-b982-61a445ff7f09" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.861092 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed5fbf9-103b-48fb-b982-61a445ff7f09" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.861750 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.865041 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.865299 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.865545 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.867937 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:38:40 crc kubenswrapper[4689]: I1013 21:38:40.874868 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h"] Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.024011 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.024110 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.024307 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks97f\" (UniqueName: \"kubernetes.io/projected/194d986e-a55b-472d-880a-789fe09fcac0-kube-api-access-ks97f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.125627 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.125690 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.125770 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks97f\" (UniqueName: \"kubernetes.io/projected/194d986e-a55b-472d-880a-789fe09fcac0-kube-api-access-ks97f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: 
\"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.134185 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.134418 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.147328 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks97f\" (UniqueName: \"kubernetes.io/projected/194d986e-a55b-472d-880a-789fe09fcac0-kube-api-access-ks97f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pdv7h\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.178206 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.689322 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h"] Oct 13 21:38:41 crc kubenswrapper[4689]: I1013 21:38:41.791808 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" event={"ID":"194d986e-a55b-472d-880a-789fe09fcac0","Type":"ContainerStarted","Data":"49d589c19a63637da240277a96e33ec6f58080fc45f054ec6f347cfd721a4cba"} Oct 13 21:38:42 crc kubenswrapper[4689]: I1013 21:38:42.800000 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" event={"ID":"194d986e-a55b-472d-880a-789fe09fcac0","Type":"ContainerStarted","Data":"6ac4f230896e66215dc4c6a08852a3afec067aa08c526ce56de516872f08d4f4"} Oct 13 21:38:42 crc kubenswrapper[4689]: I1013 21:38:42.825543 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" podStartSLOduration=2.422659553 podStartE2EDuration="2.825522034s" podCreationTimestamp="2025-10-13 21:38:40 +0000 UTC" firstStartedPulling="2025-10-13 21:38:41.688040066 +0000 UTC m=+1638.606285151" lastFinishedPulling="2025-10-13 21:38:42.090902547 +0000 UTC m=+1639.009147632" observedRunningTime="2025-10-13 21:38:42.817475694 +0000 UTC m=+1639.735720789" watchObservedRunningTime="2025-10-13 21:38:42.825522034 +0000 UTC m=+1639.743767129" Oct 13 21:38:53 crc kubenswrapper[4689]: I1013 21:38:53.859222 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:38:53 crc kubenswrapper[4689]: I1013 21:38:53.860251 4689 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:38:58 crc kubenswrapper[4689]: I1013 21:38:58.062773 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cfgv"] Oct 13 21:38:58 crc kubenswrapper[4689]: I1013 21:38:58.077350 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6cfgv"] Oct 13 21:38:59 crc kubenswrapper[4689]: I1013 21:38:59.879912 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e95bb20-d69c-4378-b16d-11856c0f4fe2" path="/var/lib/kubelet/pods/7e95bb20-d69c-4378-b16d-11856c0f4fe2/volumes" Oct 13 21:39:15 crc kubenswrapper[4689]: I1013 21:39:15.040477 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pbwb5"] Oct 13 21:39:15 crc kubenswrapper[4689]: I1013 21:39:15.050328 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-5qvqz"] Oct 13 21:39:15 crc kubenswrapper[4689]: I1013 21:39:15.058900 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pbwb5"] Oct 13 21:39:15 crc kubenswrapper[4689]: I1013 21:39:15.066303 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-5qvqz"] Oct 13 21:39:15 crc kubenswrapper[4689]: I1013 21:39:15.885435 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d94648e-97ea-49c5-a6f4-46b96f12ef74" path="/var/lib/kubelet/pods/2d94648e-97ea-49c5-a6f4-46b96f12ef74/volumes" Oct 13 21:39:15 crc kubenswrapper[4689]: I1013 21:39:15.887195 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9590d1d7-97f4-426a-a0e0-8f4d941489bc" path="/var/lib/kubelet/pods/9590d1d7-97f4-426a-a0e0-8f4d941489bc/volumes" Oct 13 21:39:23 crc kubenswrapper[4689]: I1013 21:39:23.172964 4689 generic.go:334] "Generic (PLEG): container finished" podID="194d986e-a55b-472d-880a-789fe09fcac0" containerID="6ac4f230896e66215dc4c6a08852a3afec067aa08c526ce56de516872f08d4f4" exitCode=0 Oct 13 21:39:23 crc kubenswrapper[4689]: I1013 21:39:23.173077 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" event={"ID":"194d986e-a55b-472d-880a-789fe09fcac0","Type":"ContainerDied","Data":"6ac4f230896e66215dc4c6a08852a3afec067aa08c526ce56de516872f08d4f4"} Oct 13 21:39:23 crc kubenswrapper[4689]: I1013 21:39:23.859009 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:39:23 crc kubenswrapper[4689]: I1013 21:39:23.859494 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:39:23 crc kubenswrapper[4689]: I1013 21:39:23.859554 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:39:23 crc kubenswrapper[4689]: I1013 21:39:23.860518 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:39:23 crc kubenswrapper[4689]: I1013 21:39:23.860628 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" gracePeriod=600 Oct 13 21:39:24 crc kubenswrapper[4689]: E1013 21:39:24.038959 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.183354 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" exitCode=0 Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.183396 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"} Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.183713 4689 scope.go:117] "RemoveContainer" containerID="c1c3f37ad95c44ab0daca2d4eb5b08efbef51dbd835c6dbf7a731418a7a15ce4" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.184254 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:39:24 crc kubenswrapper[4689]: E1013 21:39:24.184466 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.614090 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.687354 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-inventory\") pod \"194d986e-a55b-472d-880a-789fe09fcac0\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.687474 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ks97f\" (UniqueName: \"kubernetes.io/projected/194d986e-a55b-472d-880a-789fe09fcac0-kube-api-access-ks97f\") pod \"194d986e-a55b-472d-880a-789fe09fcac0\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.687568 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-ssh-key\") pod \"194d986e-a55b-472d-880a-789fe09fcac0\" (UID: \"194d986e-a55b-472d-880a-789fe09fcac0\") " Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.692561 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/194d986e-a55b-472d-880a-789fe09fcac0-kube-api-access-ks97f" (OuterVolumeSpecName: "kube-api-access-ks97f") pod "194d986e-a55b-472d-880a-789fe09fcac0" (UID: "194d986e-a55b-472d-880a-789fe09fcac0"). InnerVolumeSpecName "kube-api-access-ks97f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.714760 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "194d986e-a55b-472d-880a-789fe09fcac0" (UID: "194d986e-a55b-472d-880a-789fe09fcac0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.718448 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-inventory" (OuterVolumeSpecName: "inventory") pod "194d986e-a55b-472d-880a-789fe09fcac0" (UID: "194d986e-a55b-472d-880a-789fe09fcac0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.792018 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.792059 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ks97f\" (UniqueName: \"kubernetes.io/projected/194d986e-a55b-472d-880a-789fe09fcac0-kube-api-access-ks97f\") on node \"crc\" DevicePath \"\"" Oct 13 21:39:24 crc kubenswrapper[4689]: I1013 21:39:24.792076 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/194d986e-a55b-472d-880a-789fe09fcac0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.192209 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" event={"ID":"194d986e-a55b-472d-880a-789fe09fcac0","Type":"ContainerDied","Data":"49d589c19a63637da240277a96e33ec6f58080fc45f054ec6f347cfd721a4cba"} Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.192267 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49d589c19a63637da240277a96e33ec6f58080fc45f054ec6f347cfd721a4cba" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.192232 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pdv7h" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.320459 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2"] Oct 13 21:39:25 crc kubenswrapper[4689]: E1013 21:39:25.321354 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="194d986e-a55b-472d-880a-789fe09fcac0" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.321442 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="194d986e-a55b-472d-880a-789fe09fcac0" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.321733 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="194d986e-a55b-472d-880a-789fe09fcac0" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.323203 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.326786 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.326874 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.327237 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.327378 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.330142 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2"] Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.405809 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.405904 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.406111 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47vk6\" (UniqueName: \"kubernetes.io/projected/c1cb5a31-9872-40a5-acb9-6755720fe782-kube-api-access-47vk6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.507935 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.508029 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47vk6\" (UniqueName: \"kubernetes.io/projected/c1cb5a31-9872-40a5-acb9-6755720fe782-kube-api-access-47vk6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.508110 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" 
(UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.516108 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.523078 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.524347 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47vk6\" (UniqueName: \"kubernetes.io/projected/c1cb5a31-9872-40a5-acb9-6755720fe782-kube-api-access-47vk6\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-56hq2\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:25 crc kubenswrapper[4689]: I1013 21:39:25.649567 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:39:26 crc kubenswrapper[4689]: I1013 21:39:26.145539 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2"] Oct 13 21:39:26 crc kubenswrapper[4689]: I1013 21:39:26.204088 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" event={"ID":"c1cb5a31-9872-40a5-acb9-6755720fe782","Type":"ContainerStarted","Data":"74ca70c26718351f23e88b7b8bf3ceda8bb3ddc4fa52b68a61e1608541c78e0c"} Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.135164 4689 scope.go:117] "RemoveContainer" containerID="12c3681e6d7a018825098f3a96af711970788d717db880da78bfd946d311acee" Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.173888 4689 scope.go:117] "RemoveContainer" containerID="ec57e759577ee041e0c5636c584630a10cd9d3bdaf113824dd617fb3b838297f" Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.212965 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" event={"ID":"c1cb5a31-9872-40a5-acb9-6755720fe782","Type":"ContainerStarted","Data":"75bdec3e309b1cbf948027bceebf86171559679af514bd2ea7cba5efaa3ce14c"} Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.223422 4689 scope.go:117] "RemoveContainer" containerID="ebb7077e2e08d98c75e554f5b24804658672028f9d2b326aa563d3791e1ee2ce" Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.231491 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" podStartSLOduration=1.680266607 podStartE2EDuration="2.231469425s" podCreationTimestamp="2025-10-13 21:39:25 +0000 UTC" firstStartedPulling="2025-10-13 21:39:26.144922154 +0000 UTC m=+1683.063167239" lastFinishedPulling="2025-10-13 21:39:26.696124982 +0000 UTC m=+1683.614370057" observedRunningTime="2025-10-13 
21:39:27.229573129 +0000 UTC m=+1684.147818224" watchObservedRunningTime="2025-10-13 21:39:27.231469425 +0000 UTC m=+1684.149714510"
Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.245796 4689 scope.go:117] "RemoveContainer" containerID="bd9a6c99b167e2d144aa438bfe28e8259cebd1fecc61b43dbfdd08b01cdc28fe"
Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.286418 4689 scope.go:117] "RemoveContainer" containerID="cc2a6a662cf75ab61080d54e18f6fc621714d2b36508c63f81b2e587c865f98b"
Oct 13 21:39:27 crc kubenswrapper[4689]: I1013 21:39:27.306228 4689 scope.go:117] "RemoveContainer" containerID="0e775227d4a03e315be12d2f6903db3dd4dfd3953bdc834df6f36e1f186887e9"
Oct 13 21:39:36 crc kubenswrapper[4689]: I1013 21:39:36.868515 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:39:36 crc kubenswrapper[4689]: E1013 21:39:36.869745 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:39:51 crc kubenswrapper[4689]: I1013 21:39:51.867726 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:39:51 crc kubenswrapper[4689]: E1013 21:39:51.868446 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:39:59 crc kubenswrapper[4689]: I1013 21:39:59.053741 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-p9p9j"]
Oct 13 21:39:59 crc kubenswrapper[4689]: I1013 21:39:59.061085 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-p9p9j"]
Oct 13 21:39:59 crc kubenswrapper[4689]: I1013 21:39:59.882718 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13461759-7aa1-47ea-9bed-2346213bcde6" path="/var/lib/kubelet/pods/13461759-7aa1-47ea-9bed-2346213bcde6/volumes"
Oct 13 21:40:02 crc kubenswrapper[4689]: I1013 21:40:02.867170 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:40:02 crc kubenswrapper[4689]: E1013 21:40:02.867958 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:40:13 crc kubenswrapper[4689]: I1013 21:40:13.877201 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
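[editor's note] These RemoveContainer/CrashLoopBackOff pairs repeat because each sync retries the restart and is refused while the back-off window is open; "back-off 5m0s" means the per-container delay has already reached its cap. A minimal sketch of doubling-with-cap back-off; the 10s base is kubelet's usual default, assumed here rather than read from this log:

    package main

    import (
    	"fmt"
    	"time"
    )

    // Doubling restart back-off with a hard cap, as suggested by the repeated
    // "back-off 5m0s" errors above. The base delay is an assumed default.
    func backoffAfter(restarts int) time.Duration {
    	const base = 10 * time.Second
    	const maxDelay = 5 * time.Minute
    	d := base
    	for i := 0; i < restarts; i++ {
    		d *= 2
    		if d > maxDelay {
    			return maxDelay
    		}
    	}
    	return d
    }

    func main() {
    	for r := 0; r <= 6; r++ {
    		fmt.Printf("restart %d -> wait %v\n", r, backoffAfter(r))
    	}
    	// restart 0 -> 10s, 1 -> 20s, ... capped at 5m0s from restart 5 on.
    }

Oct 13 21:40:13 crc kubenswrapper[4689]: E1013 21:40:13.878268 4689 pod_workers.go:1301] "Error syncing pod, 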
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:40:26 crc kubenswrapper[4689]: I1013 21:40:26.868294 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:40:26 crc kubenswrapper[4689]: E1013 21:40:26.868998 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:40:27 crc kubenswrapper[4689]: I1013 21:40:27.430692 4689 scope.go:117] "RemoveContainer" containerID="9bef72d4f11edd9532918d10723d2d66ce1e82b755c4a06e11933eff5d56eb1e" Oct 13 21:40:27 crc kubenswrapper[4689]: I1013 21:40:27.740534 4689 generic.go:334] "Generic (PLEG): container finished" podID="c1cb5a31-9872-40a5-acb9-6755720fe782" containerID="75bdec3e309b1cbf948027bceebf86171559679af514bd2ea7cba5efaa3ce14c" exitCode=2 Oct 13 21:40:27 crc kubenswrapper[4689]: I1013 21:40:27.740596 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" event={"ID":"c1cb5a31-9872-40a5-acb9-6755720fe782","Type":"ContainerDied","Data":"75bdec3e309b1cbf948027bceebf86171559679af514bd2ea7cba5efaa3ce14c"} Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.243901 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.303149 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-inventory\") pod \"c1cb5a31-9872-40a5-acb9-6755720fe782\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.303235 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47vk6\" (UniqueName: \"kubernetes.io/projected/c1cb5a31-9872-40a5-acb9-6755720fe782-kube-api-access-47vk6\") pod \"c1cb5a31-9872-40a5-acb9-6755720fe782\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.303492 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-ssh-key\") pod \"c1cb5a31-9872-40a5-acb9-6755720fe782\" (UID: \"c1cb5a31-9872-40a5-acb9-6755720fe782\") " Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.310882 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1cb5a31-9872-40a5-acb9-6755720fe782-kube-api-access-47vk6" (OuterVolumeSpecName: "kube-api-access-47vk6") pod "c1cb5a31-9872-40a5-acb9-6755720fe782" (UID: "c1cb5a31-9872-40a5-acb9-6755720fe782"). InnerVolumeSpecName "kube-api-access-47vk6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.340210 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-inventory" (OuterVolumeSpecName: "inventory") pod "c1cb5a31-9872-40a5-acb9-6755720fe782" (UID: "c1cb5a31-9872-40a5-acb9-6755720fe782"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.341337 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c1cb5a31-9872-40a5-acb9-6755720fe782" (UID: "c1cb5a31-9872-40a5-acb9-6755720fe782"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.407413 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.407449 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1cb5a31-9872-40a5-acb9-6755720fe782-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.407463 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47vk6\" (UniqueName: \"kubernetes.io/projected/c1cb5a31-9872-40a5-acb9-6755720fe782-kube-api-access-47vk6\") on node \"crc\" DevicePath \"\"" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.761399 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" event={"ID":"c1cb5a31-9872-40a5-acb9-6755720fe782","Type":"ContainerDied","Data":"74ca70c26718351f23e88b7b8bf3ceda8bb3ddc4fa52b68a61e1608541c78e0c"} Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.761440 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74ca70c26718351f23e88b7b8bf3ceda8bb3ddc4fa52b68a61e1608541c78e0c" Oct 13 21:40:29 crc kubenswrapper[4689]: I1013 21:40:29.761978 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-56hq2" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.027970 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s"] Oct 13 21:40:36 crc kubenswrapper[4689]: E1013 21:40:36.028997 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1cb5a31-9872-40a5-acb9-6755720fe782" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.029015 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1cb5a31-9872-40a5-acb9-6755720fe782" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.029268 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1cb5a31-9872-40a5-acb9-6755720fe782" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.030078 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.035863 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.036045 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.036120 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.036470 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.042035 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s"] Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.143201 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.143630 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcp8w\" (UniqueName: \"kubernetes.io/projected/9d6b52af-e31e-464d-a83b-ce21d37da265-kube-api-access-rcp8w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.143944 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.245713 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.245798 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.245882 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcp8w\" (UniqueName: \"kubernetes.io/projected/9d6b52af-e31e-464d-a83b-ce21d37da265-kube-api-access-rcp8w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" 
(UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.253124 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.256575 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.263853 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcp8w\" (UniqueName: \"kubernetes.io/projected/9d6b52af-e31e-464d-a83b-ce21d37da265-kube-api-access-rcp8w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4b87s\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.386502 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:40:36 crc kubenswrapper[4689]: I1013 21:40:36.933931 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s"] Oct 13 21:40:37 crc kubenswrapper[4689]: I1013 21:40:37.830809 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" event={"ID":"9d6b52af-e31e-464d-a83b-ce21d37da265","Type":"ContainerStarted","Data":"c63d3b7547c298805a1609f04ffe5e5b03373a1169d13538dc02509f6bc3c75d"} Oct 13 21:40:37 crc kubenswrapper[4689]: I1013 21:40:37.831165 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" event={"ID":"9d6b52af-e31e-464d-a83b-ce21d37da265","Type":"ContainerStarted","Data":"7d2383ebc86ce602263a9cf14840eef89a5b9415c592f48c44145ba5a319bb91"} Oct 13 21:40:37 crc kubenswrapper[4689]: I1013 21:40:37.849359 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" podStartSLOduration=1.345077573 podStartE2EDuration="1.849333978s" podCreationTimestamp="2025-10-13 21:40:36 +0000 UTC" firstStartedPulling="2025-10-13 21:40:36.933119606 +0000 UTC m=+1753.851364691" lastFinishedPulling="2025-10-13 21:40:37.437375991 +0000 UTC m=+1754.355621096" observedRunningTime="2025-10-13 21:40:37.846485471 +0000 UTC m=+1754.764730566" watchObservedRunningTime="2025-10-13 21:40:37.849333978 +0000 UTC m=+1754.767579103" Oct 13 21:40:40 crc kubenswrapper[4689]: I1013 21:40:40.867714 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:40:40 crc kubenswrapper[4689]: E1013 21:40:40.868525 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:40:53 crc kubenswrapper[4689]: I1013 21:40:53.880570 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:40:53 crc kubenswrapper[4689]: E1013 21:40:53.881900 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:41:04 crc kubenswrapper[4689]: I1013 21:41:04.867515 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:41:04 crc kubenswrapper[4689]: E1013 21:41:04.868258 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:41:17 crc kubenswrapper[4689]: I1013 21:41:17.868342 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:41:17 crc kubenswrapper[4689]: E1013 21:41:17.869146 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:41:27 crc kubenswrapper[4689]: I1013 21:41:27.539054 4689 scope.go:117] "RemoveContainer" containerID="45d341204f5f88eadcf15a69004c081bf72c7e703734276a763fde946b91e152"
Oct 13 21:41:27 crc kubenswrapper[4689]: I1013 21:41:27.563116 4689 scope.go:117] "RemoveContainer" containerID="f193f2633129670ee44eb66400a3d27c6e3cc933f1467fde1c5d9bf268165793"
Oct 13 21:41:27 crc kubenswrapper[4689]: I1013 21:41:27.619724 4689 scope.go:117] "RemoveContainer" containerID="e60849149a0cf7a95c0a07942c78ce1f07377ec198017afdd2658a9c51cfa9d4"
Oct 13 21:41:28 crc kubenswrapper[4689]: I1013 21:41:28.868709 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:41:28 crc kubenswrapper[4689]: E1013 21:41:28.869040 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
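[editor's note] The "SyncLoop (PLEG)" entries throughout this log print pod lifecycle events as event={"ID":...,"Type":...,"Data":...}. A local Go mirror of that shape for reading them; kubelet's real type lives in pkg/kubelet/pleg, so this reconstruction from the printed fields is only an approximation:

    package main

    import "fmt"

    // Mirror of the event shape printed by the "SyncLoop (PLEG)" entries:
    // ID is the pod UID, Type is ContainerStarted/ContainerDied/..., and Data
    // carries the container or sandbox ID. Approximation, not kubelet's package.
    type PodLifeCycleEventType string

    const (
    	ContainerStarted PodLifeCycleEventType = "ContainerStarted"
    	ContainerDied    PodLifeCycleEventType = "ContainerDied"
    )

    type PodLifecycleEvent struct {
    	ID   string // pod UID
    	Type PodLifeCycleEventType
    	Data interface{} // container ID for the two types above
    }

    func main() {
    	// The ContainerDied event that follows this sketch in the log, re-encoded.
    	ev := PodLifecycleEvent{
    		ID:   "9d6b52af-e31e-464d-a83b-ce21d37da265",
    		Type: ContainerDied,
    		Data: "c63d3b7547c298805a1609f04ffe5e5b03373a1169d13538dc02509f6bc3c75d",
    	}
    	fmt.Printf("event=%+v\n", ev)
    }

Oct 13 21:41:31 crc kubenswrapper[4689]: I1013 21:41:31.304664 4689 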
generic.go:334] "Generic (PLEG): container finished" podID="9d6b52af-e31e-464d-a83b-ce21d37da265" containerID="c63d3b7547c298805a1609f04ffe5e5b03373a1169d13538dc02509f6bc3c75d" exitCode=0 Oct 13 21:41:31 crc kubenswrapper[4689]: I1013 21:41:31.304765 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" event={"ID":"9d6b52af-e31e-464d-a83b-ce21d37da265","Type":"ContainerDied","Data":"c63d3b7547c298805a1609f04ffe5e5b03373a1169d13538dc02509f6bc3c75d"} Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.709416 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.744754 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-inventory\") pod \"9d6b52af-e31e-464d-a83b-ce21d37da265\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.744907 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcp8w\" (UniqueName: \"kubernetes.io/projected/9d6b52af-e31e-464d-a83b-ce21d37da265-kube-api-access-rcp8w\") pod \"9d6b52af-e31e-464d-a83b-ce21d37da265\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.745969 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-ssh-key\") pod \"9d6b52af-e31e-464d-a83b-ce21d37da265\" (UID: \"9d6b52af-e31e-464d-a83b-ce21d37da265\") " Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.752896 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d6b52af-e31e-464d-a83b-ce21d37da265-kube-api-access-rcp8w" (OuterVolumeSpecName: "kube-api-access-rcp8w") pod "9d6b52af-e31e-464d-a83b-ce21d37da265" (UID: "9d6b52af-e31e-464d-a83b-ce21d37da265"). InnerVolumeSpecName "kube-api-access-rcp8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.773405 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9d6b52af-e31e-464d-a83b-ce21d37da265" (UID: "9d6b52af-e31e-464d-a83b-ce21d37da265"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.783084 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-inventory" (OuterVolumeSpecName: "inventory") pod "9d6b52af-e31e-464d-a83b-ce21d37da265" (UID: "9d6b52af-e31e-464d-a83b-ce21d37da265"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.849695 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.849744 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9d6b52af-e31e-464d-a83b-ce21d37da265-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:41:32 crc kubenswrapper[4689]: I1013 21:41:32.849756 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcp8w\" (UniqueName: \"kubernetes.io/projected/9d6b52af-e31e-464d-a83b-ce21d37da265-kube-api-access-rcp8w\") on node \"crc\" DevicePath \"\"" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.322637 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" event={"ID":"9d6b52af-e31e-464d-a83b-ce21d37da265","Type":"ContainerDied","Data":"7d2383ebc86ce602263a9cf14840eef89a5b9415c592f48c44145ba5a319bb91"} Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.322714 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d2383ebc86ce602263a9cf14840eef89a5b9415c592f48c44145ba5a319bb91" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.322753 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4b87s" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.414527 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-dnqdr"] Oct 13 21:41:33 crc kubenswrapper[4689]: E1013 21:41:33.414948 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d6b52af-e31e-464d-a83b-ce21d37da265" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.414970 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d6b52af-e31e-464d-a83b-ce21d37da265" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.415184 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d6b52af-e31e-464d-a83b-ce21d37da265" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.415950 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.417388 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.418479 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.418816 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.418979 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.431407 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-dnqdr"] Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.465485 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6flfz\" (UniqueName: \"kubernetes.io/projected/1d1c238b-38b4-471f-a55b-706b93036367-kube-api-access-6flfz\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.465561 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.465653 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.567784 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6flfz\" (UniqueName: \"kubernetes.io/projected/1d1c238b-38b4-471f-a55b-706b93036367-kube-api-access-6flfz\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.567955 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.568014 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" Oct 13 21:41:33 crc 
kubenswrapper[4689]: I1013 21:41:33.573270 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr"
Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.573269 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr"
Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.582770 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6flfz\" (UniqueName: \"kubernetes.io/projected/1d1c238b-38b4-471f-a55b-706b93036367-kube-api-access-6flfz\") pod \"ssh-known-hosts-edpm-deployment-dnqdr\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") " pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr"
Oct 13 21:41:33 crc kubenswrapper[4689]: I1013 21:41:33.731636 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr"
Oct 13 21:41:34 crc kubenswrapper[4689]: I1013 21:41:34.234160 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-dnqdr"]
Oct 13 21:41:34 crc kubenswrapper[4689]: I1013 21:41:34.244224 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 13 21:41:34 crc kubenswrapper[4689]: I1013 21:41:34.330578 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" event={"ID":"1d1c238b-38b4-471f-a55b-706b93036367","Type":"ContainerStarted","Data":"67b89a1b8feb526f25abf86c977c4e955559907c24317b03ca36b4b848ade628"}
Oct 13 21:41:35 crc kubenswrapper[4689]: I1013 21:41:35.345059 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" event={"ID":"1d1c238b-38b4-471f-a55b-706b93036367","Type":"ContainerStarted","Data":"d607671b442a0184494b8195e839396f4b3ff39912aca55069c647043cffb35f"}
Oct 13 21:41:35 crc kubenswrapper[4689]: I1013 21:41:35.364855 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" podStartSLOduration=1.9241581970000001 podStartE2EDuration="2.364837286s" podCreationTimestamp="2025-10-13 21:41:33 +0000 UTC" firstStartedPulling="2025-10-13 21:41:34.24394065 +0000 UTC m=+1811.162185745" lastFinishedPulling="2025-10-13 21:41:34.684619749 +0000 UTC m=+1811.602864834" observedRunningTime="2025-10-13 21:41:35.359470609 +0000 UTC m=+1812.277715694" watchObservedRunningTime="2025-10-13 21:41:35.364837286 +0000 UTC m=+1812.283082371"
Oct 13 21:41:42 crc kubenswrapper[4689]: I1013 21:41:42.868580 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:41:42 crc kubenswrapper[4689]: E1013 21:41:42.870492 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:41:43 crc kubenswrapper[4689]: I1013 21:41:43.411869 4689 generic.go:334] "Generic (PLEG): container finished" podID="1d1c238b-38b4-471f-a55b-706b93036367" containerID="d607671b442a0184494b8195e839396f4b3ff39912aca55069c647043cffb35f" exitCode=0
Oct 13 21:41:43 crc kubenswrapper[4689]: I1013 21:41:43.411914 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" event={"ID":"1d1c238b-38b4-471f-a55b-706b93036367","Type":"ContainerDied","Data":"d607671b442a0184494b8195e839396f4b3ff39912aca55069c647043cffb35f"}
Oct 13 21:41:44 crc kubenswrapper[4689]: I1013 21:41:44.797754 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr"
Oct 13 21:41:44 crc kubenswrapper[4689]: I1013 21:41:44.962414 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-ssh-key-openstack-edpm-ipam\") pod \"1d1c238b-38b4-471f-a55b-706b93036367\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") "
Oct 13 21:41:44 crc kubenswrapper[4689]: I1013 21:41:44.962559 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6flfz\" (UniqueName: \"kubernetes.io/projected/1d1c238b-38b4-471f-a55b-706b93036367-kube-api-access-6flfz\") pod \"1d1c238b-38b4-471f-a55b-706b93036367\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") "
Oct 13 21:41:44 crc kubenswrapper[4689]: I1013 21:41:44.962740 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-inventory-0\") pod \"1d1c238b-38b4-471f-a55b-706b93036367\" (UID: \"1d1c238b-38b4-471f-a55b-706b93036367\") "
Oct 13 21:41:44 crc kubenswrapper[4689]: I1013 21:41:44.969031 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d1c238b-38b4-471f-a55b-706b93036367-kube-api-access-6flfz" (OuterVolumeSpecName: "kube-api-access-6flfz") pod "1d1c238b-38b4-471f-a55b-706b93036367" (UID: "1d1c238b-38b4-471f-a55b-706b93036367"). InnerVolumeSpecName "kube-api-access-6flfz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:41:44 crc kubenswrapper[4689]: I1013 21:41:44.996002 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "1d1c238b-38b4-471f-a55b-706b93036367" (UID: "1d1c238b-38b4-471f-a55b-706b93036367"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.015564 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1d1c238b-38b4-471f-a55b-706b93036367" (UID: "1d1c238b-38b4-471f-a55b-706b93036367"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.015623 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"]
Oct 13 21:41:45 crc kubenswrapper[4689]: E1013 21:41:45.016003 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d1c238b-38b4-471f-a55b-706b93036367" containerName="ssh-known-hosts-edpm-deployment"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.016014 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d1c238b-38b4-471f-a55b-706b93036367" containerName="ssh-known-hosts-edpm-deployment"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.016205 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d1c238b-38b4-471f-a55b-706b93036367" containerName="ssh-known-hosts-edpm-deployment"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.019237 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.038251 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"]
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.065566 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6flfz\" (UniqueName: \"kubernetes.io/projected/1d1c238b-38b4-471f-a55b-706b93036367-kube-api-access-6flfz\") on node \"crc\" DevicePath \"\""
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.065619 4689 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-inventory-0\") on node \"crc\" DevicePath \"\""
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.065630 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1d1c238b-38b4-471f-a55b-706b93036367-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.167317 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.167424 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxmjh\" (UniqueName: \"kubernetes.io/projected/5d79295b-e957-48d5-b56e-d84c50ca7250-kube-api-access-vxmjh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.167521 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.269387 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxmjh\" (UniqueName: \"kubernetes.io/projected/5d79295b-e957-48d5-b56e-d84c50ca7250-kube-api-access-vxmjh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.269453 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.269547 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.272832 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.273120 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.284335 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxmjh\" (UniqueName: \"kubernetes.io/projected/5d79295b-e957-48d5-b56e-d84c50ca7250-kube-api-access-vxmjh\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-wdbxk\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.427369 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.428899 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr" event={"ID":"1d1c238b-38b4-471f-a55b-706b93036367","Type":"ContainerDied","Data":"67b89a1b8feb526f25abf86c977c4e955559907c24317b03ca36b4b848ade628"}
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.428938 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67b89a1b8feb526f25abf86c977c4e955559907c24317b03ca36b4b848ade628"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.428994 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-dnqdr"
Oct 13 21:41:45 crc kubenswrapper[4689]: I1013 21:41:45.923761 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"]
Oct 13 21:41:45 crc kubenswrapper[4689]: W1013 21:41:45.935759 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d79295b_e957_48d5_b56e_d84c50ca7250.slice/crio-42f0dcb6ea375123bd501186f8f225ceb213041b14aebca02b84027af9a34827 WatchSource:0}: Error finding container 42f0dcb6ea375123bd501186f8f225ceb213041b14aebca02b84027af9a34827: Status 404 returned error can't find the container with id 42f0dcb6ea375123bd501186f8f225ceb213041b14aebca02b84027af9a34827
Oct 13 21:41:46 crc kubenswrapper[4689]: I1013 21:41:46.441294 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk" event={"ID":"5d79295b-e957-48d5-b56e-d84c50ca7250","Type":"ContainerStarted","Data":"42f0dcb6ea375123bd501186f8f225ceb213041b14aebca02b84027af9a34827"}
Oct 13 21:41:47 crc kubenswrapper[4689]: I1013 21:41:47.453536 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk" event={"ID":"5d79295b-e957-48d5-b56e-d84c50ca7250","Type":"ContainerStarted","Data":"1c63d54617a0cd7d40c85d0d86d06ed32586f3027809faaaf09513a32441fc7b"}
Oct 13 21:41:47 crc kubenswrapper[4689]: I1013 21:41:47.471570 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk" podStartSLOduration=2.826485403 podStartE2EDuration="3.471547297s" podCreationTimestamp="2025-10-13 21:41:44 +0000 UTC" firstStartedPulling="2025-10-13 21:41:45.937781223 +0000 UTC m=+1822.856026308" lastFinishedPulling="2025-10-13 21:41:46.582843107 +0000 UTC m=+1823.501088202" observedRunningTime="2025-10-13 21:41:47.468765831 +0000 UTC m=+1824.387010926" watchObservedRunningTime="2025-10-13 21:41:47.471547297 +0000 UTC m=+1824.389792382"
Oct 13 21:41:55 crc kubenswrapper[4689]: I1013 21:41:55.540510 4689 generic.go:334] "Generic (PLEG): container finished" podID="5d79295b-e957-48d5-b56e-d84c50ca7250" containerID="1c63d54617a0cd7d40c85d0d86d06ed32586f3027809faaaf09513a32441fc7b" exitCode=0
Oct 13 21:41:55 crc kubenswrapper[4689]: I1013 21:41:55.540607 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk" event={"ID":"5d79295b-e957-48d5-b56e-d84c50ca7250","Type":"ContainerDied","Data":"1c63d54617a0cd7d40c85d0d86d06ed32586f3027809faaaf09513a32441fc7b"}
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.036373 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.099451 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-ssh-key\") pod \"5d79295b-e957-48d5-b56e-d84c50ca7250\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") "
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.100100 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxmjh\" (UniqueName: \"kubernetes.io/projected/5d79295b-e957-48d5-b56e-d84c50ca7250-kube-api-access-vxmjh\") pod \"5d79295b-e957-48d5-b56e-d84c50ca7250\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") "
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.100197 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-inventory\") pod \"5d79295b-e957-48d5-b56e-d84c50ca7250\" (UID: \"5d79295b-e957-48d5-b56e-d84c50ca7250\") "
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.105828 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d79295b-e957-48d5-b56e-d84c50ca7250-kube-api-access-vxmjh" (OuterVolumeSpecName: "kube-api-access-vxmjh") pod "5d79295b-e957-48d5-b56e-d84c50ca7250" (UID: "5d79295b-e957-48d5-b56e-d84c50ca7250"). InnerVolumeSpecName "kube-api-access-vxmjh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.129369 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5d79295b-e957-48d5-b56e-d84c50ca7250" (UID: "5d79295b-e957-48d5-b56e-d84c50ca7250"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.131022 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-inventory" (OuterVolumeSpecName: "inventory") pod "5d79295b-e957-48d5-b56e-d84c50ca7250" (UID: "5d79295b-e957-48d5-b56e-d84c50ca7250"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.203262 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxmjh\" (UniqueName: \"kubernetes.io/projected/5d79295b-e957-48d5-b56e-d84c50ca7250-kube-api-access-vxmjh\") on node \"crc\" DevicePath \"\""
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.203321 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-inventory\") on node \"crc\" DevicePath \"\""
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.203341 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d79295b-e957-48d5-b56e-d84c50ca7250-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.566309 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk" event={"ID":"5d79295b-e957-48d5-b56e-d84c50ca7250","Type":"ContainerDied","Data":"42f0dcb6ea375123bd501186f8f225ceb213041b14aebca02b84027af9a34827"}
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.566769 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42f0dcb6ea375123bd501186f8f225ceb213041b14aebca02b84027af9a34827"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.566480 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-wdbxk"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.662105 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"]
Oct 13 21:41:57 crc kubenswrapper[4689]: E1013 21:41:57.662575 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d79295b-e957-48d5-b56e-d84c50ca7250" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.662608 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d79295b-e957-48d5-b56e-d84c50ca7250" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.662845 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d79295b-e957-48d5-b56e-d84c50ca7250" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.663518 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.666890 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.666890 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.667066 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.668473 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.671733 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"]
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.716234 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.716302 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckddl\" (UniqueName: \"kubernetes.io/projected/024b2226-0636-4d0c-8225-53b2e5ad7050-kube-api-access-ckddl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.716340 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.818176 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.818555 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckddl\" (UniqueName: \"kubernetes.io/projected/024b2226-0636-4d0c-8225-53b2e5ad7050-kube-api-access-ckddl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.818666 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.822087 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.822776 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.835099 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckddl\" (UniqueName: \"kubernetes.io/projected/024b2226-0636-4d0c-8225-53b2e5ad7050-kube-api-access-ckddl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.867329 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:41:57 crc kubenswrapper[4689]: E1013 21:41:57.867643 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:41:57 crc kubenswrapper[4689]: I1013 21:41:57.996982 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:41:58 crc kubenswrapper[4689]: I1013 21:41:58.573737 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"]
Oct 13 21:41:59 crc kubenswrapper[4689]: I1013 21:41:59.596812 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv" event={"ID":"024b2226-0636-4d0c-8225-53b2e5ad7050","Type":"ContainerStarted","Data":"f1129c5eba5e43afd0de761b59c211b106fe2308c6dd8b118a45fe7650e15419"}
Oct 13 21:41:59 crc kubenswrapper[4689]: I1013 21:41:59.598553 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv" event={"ID":"024b2226-0636-4d0c-8225-53b2e5ad7050","Type":"ContainerStarted","Data":"07d652b0c84610556b38b37dfb20b68ec84c669986fbfa5207608ab1398b3010"}
Oct 13 21:41:59 crc kubenswrapper[4689]: I1013 21:41:59.624434 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv" podStartSLOduration=2.173890542 podStartE2EDuration="2.624416443s" podCreationTimestamp="2025-10-13 21:41:57 +0000 UTC" firstStartedPulling="2025-10-13 21:41:58.576095598 +0000 UTC m=+1835.494340723" lastFinishedPulling="2025-10-13 21:41:59.026621499 +0000 UTC m=+1835.944866624" observedRunningTime="2025-10-13 21:41:59.622879666 +0000 UTC m=+1836.541124781" watchObservedRunningTime="2025-10-13 21:41:59.624416443 +0000 UTC m=+1836.542661518"
Oct 13 21:42:09 crc kubenswrapper[4689]: I1013 21:42:09.684389 4689 generic.go:334] "Generic (PLEG): container finished" podID="024b2226-0636-4d0c-8225-53b2e5ad7050" containerID="f1129c5eba5e43afd0de761b59c211b106fe2308c6dd8b118a45fe7650e15419" exitCode=0
Oct 13 21:42:09 crc kubenswrapper[4689]: I1013 21:42:09.684432 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv" event={"ID":"024b2226-0636-4d0c-8225-53b2e5ad7050","Type":"ContainerDied","Data":"f1129c5eba5e43afd0de761b59c211b106fe2308c6dd8b118a45fe7650e15419"}
Oct 13 21:42:09 crc kubenswrapper[4689]: I1013 21:42:09.868367 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:42:09 crc kubenswrapper[4689]: E1013 21:42:09.868912 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.149488 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv"
Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.280732 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckddl\" (UniqueName: \"kubernetes.io/projected/024b2226-0636-4d0c-8225-53b2e5ad7050-kube-api-access-ckddl\") pod \"024b2226-0636-4d0c-8225-53b2e5ad7050\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") "
Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.280998 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-ssh-key\") pod \"024b2226-0636-4d0c-8225-53b2e5ad7050\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") "
Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.281120 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-inventory\") pod \"024b2226-0636-4d0c-8225-53b2e5ad7050\" (UID: \"024b2226-0636-4d0c-8225-53b2e5ad7050\") "
Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.286605 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/024b2226-0636-4d0c-8225-53b2e5ad7050-kube-api-access-ckddl" (OuterVolumeSpecName: "kube-api-access-ckddl") pod "024b2226-0636-4d0c-8225-53b2e5ad7050" (UID: "024b2226-0636-4d0c-8225-53b2e5ad7050"). InnerVolumeSpecName "kube-api-access-ckddl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.309115 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-inventory" (OuterVolumeSpecName: "inventory") pod "024b2226-0636-4d0c-8225-53b2e5ad7050" (UID: "024b2226-0636-4d0c-8225-53b2e5ad7050"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.316429 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "024b2226-0636-4d0c-8225-53b2e5ad7050" (UID: "024b2226-0636-4d0c-8225-53b2e5ad7050"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.383719 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.383770 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/024b2226-0636-4d0c-8225-53b2e5ad7050-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.383784 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckddl\" (UniqueName: \"kubernetes.io/projected/024b2226-0636-4d0c-8225-53b2e5ad7050-kube-api-access-ckddl\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.702348 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv" event={"ID":"024b2226-0636-4d0c-8225-53b2e5ad7050","Type":"ContainerDied","Data":"07d652b0c84610556b38b37dfb20b68ec84c669986fbfa5207608ab1398b3010"} Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.702390 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07d652b0c84610556b38b37dfb20b68ec84c669986fbfa5207608ab1398b3010" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.702437 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.786454 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"] Oct 13 21:42:11 crc kubenswrapper[4689]: E1013 21:42:11.786910 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="024b2226-0636-4d0c-8225-53b2e5ad7050" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.786931 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="024b2226-0636-4d0c-8225-53b2e5ad7050" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.787169 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="024b2226-0636-4d0c-8225-53b2e5ad7050" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.787938 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.790646 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.790696 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.790826 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.790923 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.790949 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.790981 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.791054 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.791127 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.805638 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"] Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.892282 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.892354 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.892411 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.892576 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.892659 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.892788 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.892967 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.893008 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.893319 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.893364 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.893433 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: 
\"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.893486 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.893524 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.893706 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdtxj\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-kube-api-access-tdtxj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.995709 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.995815 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.995921 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996002 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdtxj\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-kube-api-access-tdtxj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 
crc kubenswrapper[4689]: I1013 21:42:11.996100 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996173 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996243 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996307 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996361 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996493 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996626 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996688 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996808 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:11 crc kubenswrapper[4689]: I1013 21:42:11.996884 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.000270 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.000758 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.001122 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.001297 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.001445 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:12 crc 
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.002305 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.002934 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.003405 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.003997 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.004223 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.004478 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.004486 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.004912 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.016531 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdtxj\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-kube-api-access-tdtxj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-48qv8\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.163131 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.669858 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"]
Oct 13 21:42:12 crc kubenswrapper[4689]: I1013 21:42:12.711340 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" event={"ID":"af61fcd9-0c85-418a-8329-0a0dc4236f35","Type":"ContainerStarted","Data":"771a9b303b720dc86b2971764469de637e5e9c2088ec5d84ba99c38610ba1b1a"}
Oct 13 21:42:13 crc kubenswrapper[4689]: I1013 21:42:13.720631 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" event={"ID":"af61fcd9-0c85-418a-8329-0a0dc4236f35","Type":"ContainerStarted","Data":"14ae8a9c124b2e42c39e4e33317f3bc655b2f4c3b2269cec5c73f805d3d5dbf4"}
Oct 13 21:42:22 crc kubenswrapper[4689]: I1013 21:42:22.868444 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:42:22 crc kubenswrapper[4689]: E1013 21:42:22.870042 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:42:33 crc kubenswrapper[4689]: I1013 21:42:33.884027 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:42:33 crc kubenswrapper[4689]: E1013 21:42:33.888933 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:42:44 crc kubenswrapper[4689]: I1013 21:42:44.867132 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737"
Oct 13 21:42:44 crc kubenswrapper[4689]: E1013 21:42:44.868148 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.475478 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" podStartSLOduration=44.894559003 podStartE2EDuration="45.475453365s" podCreationTimestamp="2025-10-13 21:42:11 +0000 UTC" firstStartedPulling="2025-10-13 21:42:12.668541289 +0000 UTC m=+1849.586786394" lastFinishedPulling="2025-10-13 21:42:13.249435671 +0000 UTC m=+1850.167680756" observedRunningTime="2025-10-13 21:42:13.75212021 +0000 UTC m=+1850.670365295" watchObservedRunningTime="2025-10-13 21:42:56.475453365 +0000 UTC m=+1893.393698450"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.493272 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c4xm7"]
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.495520 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.516637 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4xm7"]
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.600767 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-utilities\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.600858 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b27jk\" (UniqueName: \"kubernetes.io/projected/1eb5fcf6-962f-41f5-a438-c345c24454fa-kube-api-access-b27jk\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.600884 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-catalog-content\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.703836 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-utilities\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.704128 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b27jk\" (UniqueName: \"kubernetes.io/projected/1eb5fcf6-962f-41f5-a438-c345c24454fa-kube-api-access-b27jk\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.704188 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-catalog-content\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.704944 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-catalog-content\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.705009 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-utilities\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.735983 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b27jk\" (UniqueName: \"kubernetes.io/projected/1eb5fcf6-962f-41f5-a438-c345c24454fa-kube-api-access-b27jk\") pod \"redhat-marketplace-c4xm7\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:56 crc kubenswrapper[4689]: I1013 21:42:56.825395 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4xm7"
Oct 13 21:42:57 crc kubenswrapper[4689]: I1013 21:42:57.108999 4689 generic.go:334] "Generic (PLEG): container finished" podID="af61fcd9-0c85-418a-8329-0a0dc4236f35" containerID="14ae8a9c124b2e42c39e4e33317f3bc655b2f4c3b2269cec5c73f805d3d5dbf4" exitCode=0
Oct 13 21:42:57 crc kubenswrapper[4689]: I1013 21:42:57.109102 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" event={"ID":"af61fcd9-0c85-418a-8329-0a0dc4236f35","Type":"ContainerDied","Data":"14ae8a9c124b2e42c39e4e33317f3bc655b2f4c3b2269cec5c73f805d3d5dbf4"}
Oct 13 21:42:57 crc kubenswrapper[4689]: I1013 21:42:57.286936 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4xm7"]
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.119036 4689 generic.go:334] "Generic (PLEG): container finished" podID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerID="5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892" exitCode=0
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.119616 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4xm7" event={"ID":"1eb5fcf6-962f-41f5-a438-c345c24454fa","Type":"ContainerDied","Data":"5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892"}
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.119658 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4xm7" event={"ID":"1eb5fcf6-962f-41f5-a438-c345c24454fa","Type":"ContainerStarted","Data":"176c58880093ae9d9179d55992b584835d6f40b0fdfc159ae962c369239a1416"}
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.530983 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8"
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.686479 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-bootstrap-combined-ca-bundle\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.686648 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.686802 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ovn-combined-ca-bundle\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688447 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688705 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-inventory\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688748 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-telemetry-combined-ca-bundle\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688797 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-repo-setup-combined-ca-bundle\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688820 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdtxj\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-kube-api-access-tdtxj\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688847 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ssh-key\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688894 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-nova-combined-ca-bundle\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688925 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-neutron-metadata-combined-ca-bundle\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.688959 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-libvirt-combined-ca-bundle\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.689008 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.689059 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-ovn-default-certs-0\") pod \"af61fcd9-0c85-418a-8329-0a0dc4236f35\" (UID: \"af61fcd9-0c85-418a-8329-0a0dc4236f35\") "
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.693448 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.693490 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.694295 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "libvirt-combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.694523 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-kube-api-access-tdtxj" (OuterVolumeSpecName: "kube-api-access-tdtxj") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "kube-api-access-tdtxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.694978 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.695061 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.695653 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.696253 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.696787 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.697156 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.697696 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.700753 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.717221 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.722738 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-inventory" (OuterVolumeSpecName: "inventory") pod "af61fcd9-0c85-418a-8329-0a0dc4236f35" (UID: "af61fcd9-0c85-418a-8329-0a0dc4236f35"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.791994 4689 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792396 4689 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792410 4689 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792423 4689 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792457 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792467 4689 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792479 4689 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792491 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdtxj\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-kube-api-access-tdtxj\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792505 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792536 4689 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792545 4689 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792555 4689 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af61fcd9-0c85-418a-8329-0a0dc4236f35-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 
crc kubenswrapper[4689]: I1013 21:42:58.792567 4689 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:58 crc kubenswrapper[4689]: I1013 21:42:58.792576 4689 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/af61fcd9-0c85-418a-8329-0a0dc4236f35-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.135361 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" event={"ID":"af61fcd9-0c85-418a-8329-0a0dc4236f35","Type":"ContainerDied","Data":"771a9b303b720dc86b2971764469de637e5e9c2088ec5d84ba99c38610ba1b1a"} Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.135404 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="771a9b303b720dc86b2971764469de637e5e9c2088ec5d84ba99c38610ba1b1a" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.135435 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-48qv8" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.232707 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j"] Oct 13 21:42:59 crc kubenswrapper[4689]: E1013 21:42:59.233437 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af61fcd9-0c85-418a-8329-0a0dc4236f35" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.233457 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="af61fcd9-0c85-418a-8329-0a0dc4236f35" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.233685 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="af61fcd9-0c85-418a-8329-0a0dc4236f35" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.234269 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.238843 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.238902 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.239037 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.239045 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.239157 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.243783 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j"] Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.301698 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.301787 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.301838 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.301894 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0185d029-cb9b-4438-a72a-6616759e267e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.301948 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krptr\" (UniqueName: \"kubernetes.io/projected/0185d029-cb9b-4438-a72a-6616759e267e-kube-api-access-krptr\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.403336 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.403412 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.403461 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0185d029-cb9b-4438-a72a-6616759e267e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.403497 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krptr\" (UniqueName: \"kubernetes.io/projected/0185d029-cb9b-4438-a72a-6616759e267e-kube-api-access-krptr\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.403566 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.405126 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0185d029-cb9b-4438-a72a-6616759e267e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.410671 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.410697 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.410702 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.419381 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krptr\" (UniqueName: \"kubernetes.io/projected/0185d029-cb9b-4438-a72a-6616759e267e-kube-api-access-krptr\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vjk7j\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.581303 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:42:59 crc kubenswrapper[4689]: I1013 21:42:59.867599 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:42:59 crc kubenswrapper[4689]: E1013 21:42:59.868191 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:43:00 crc kubenswrapper[4689]: I1013 21:43:00.150070 4689 generic.go:334] "Generic (PLEG): container finished" podID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerID="3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea" exitCode=0 Oct 13 21:43:00 crc kubenswrapper[4689]: I1013 21:43:00.150153 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4xm7" event={"ID":"1eb5fcf6-962f-41f5-a438-c345c24454fa","Type":"ContainerDied","Data":"3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea"} Oct 13 21:43:00 crc kubenswrapper[4689]: I1013 21:43:00.207003 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j"] Oct 13 21:43:00 crc kubenswrapper[4689]: W1013 21:43:00.210966 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0185d029_cb9b_4438_a72a_6616759e267e.slice/crio-3197aa0d23076f03d9fa7a7b5cf0b456f9d673855c6de65b6092635a65b2c4ed WatchSource:0}: Error finding container 3197aa0d23076f03d9fa7a7b5cf0b456f9d673855c6de65b6092635a65b2c4ed: Status 404 returned error can't find the container with id 3197aa0d23076f03d9fa7a7b5cf0b456f9d673855c6de65b6092635a65b2c4ed Oct 13 21:43:01 crc kubenswrapper[4689]: I1013 21:43:01.163168 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4xm7" event={"ID":"1eb5fcf6-962f-41f5-a438-c345c24454fa","Type":"ContainerStarted","Data":"a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1"} Oct 13 21:43:01 crc kubenswrapper[4689]: I1013 21:43:01.166931 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" event={"ID":"0185d029-cb9b-4438-a72a-6616759e267e","Type":"ContainerStarted","Data":"84fe95e47b0e6ba64f1b075e5593bfeeade23c69d67f36b271c1f379cb891d6c"} Oct 13 21:43:01 crc kubenswrapper[4689]: I1013 21:43:01.166984 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" 
event={"ID":"0185d029-cb9b-4438-a72a-6616759e267e","Type":"ContainerStarted","Data":"3197aa0d23076f03d9fa7a7b5cf0b456f9d673855c6de65b6092635a65b2c4ed"} Oct 13 21:43:01 crc kubenswrapper[4689]: I1013 21:43:01.224188 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c4xm7" podStartSLOduration=2.761559806 podStartE2EDuration="5.224140932s" podCreationTimestamp="2025-10-13 21:42:56 +0000 UTC" firstStartedPulling="2025-10-13 21:42:58.121902821 +0000 UTC m=+1895.040147906" lastFinishedPulling="2025-10-13 21:43:00.584483927 +0000 UTC m=+1897.502729032" observedRunningTime="2025-10-13 21:43:01.190526195 +0000 UTC m=+1898.108771300" watchObservedRunningTime="2025-10-13 21:43:01.224140932 +0000 UTC m=+1898.142386057" Oct 13 21:43:01 crc kubenswrapper[4689]: I1013 21:43:01.251784 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" podStartSLOduration=1.742172966 podStartE2EDuration="2.251757847s" podCreationTimestamp="2025-10-13 21:42:59 +0000 UTC" firstStartedPulling="2025-10-13 21:43:00.214394512 +0000 UTC m=+1897.132639597" lastFinishedPulling="2025-10-13 21:43:00.723979393 +0000 UTC m=+1897.642224478" observedRunningTime="2025-10-13 21:43:01.236313751 +0000 UTC m=+1898.154558846" watchObservedRunningTime="2025-10-13 21:43:01.251757847 +0000 UTC m=+1898.170002942" Oct 13 21:43:06 crc kubenswrapper[4689]: I1013 21:43:06.825893 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c4xm7" Oct 13 21:43:06 crc kubenswrapper[4689]: I1013 21:43:06.826725 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c4xm7" Oct 13 21:43:06 crc kubenswrapper[4689]: I1013 21:43:06.886740 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c4xm7" Oct 13 21:43:07 crc kubenswrapper[4689]: I1013 21:43:07.273361 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c4xm7" Oct 13 21:43:07 crc kubenswrapper[4689]: I1013 21:43:07.322492 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4xm7"] Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.238289 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c4xm7" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="registry-server" containerID="cri-o://a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1" gracePeriod=2 Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.719254 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4xm7" Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.724126 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-utilities\") pod \"1eb5fcf6-962f-41f5-a438-c345c24454fa\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.724820 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-catalog-content\") pod \"1eb5fcf6-962f-41f5-a438-c345c24454fa\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.725000 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b27jk\" (UniqueName: \"kubernetes.io/projected/1eb5fcf6-962f-41f5-a438-c345c24454fa-kube-api-access-b27jk\") pod \"1eb5fcf6-962f-41f5-a438-c345c24454fa\" (UID: \"1eb5fcf6-962f-41f5-a438-c345c24454fa\") " Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.725783 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-utilities" (OuterVolumeSpecName: "utilities") pod "1eb5fcf6-962f-41f5-a438-c345c24454fa" (UID: "1eb5fcf6-962f-41f5-a438-c345c24454fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.727648 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.747423 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eb5fcf6-962f-41f5-a438-c345c24454fa-kube-api-access-b27jk" (OuterVolumeSpecName: "kube-api-access-b27jk") pod "1eb5fcf6-962f-41f5-a438-c345c24454fa" (UID: "1eb5fcf6-962f-41f5-a438-c345c24454fa"). InnerVolumeSpecName "kube-api-access-b27jk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.758857 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1eb5fcf6-962f-41f5-a438-c345c24454fa" (UID: "1eb5fcf6-962f-41f5-a438-c345c24454fa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.855430 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b27jk\" (UniqueName: \"kubernetes.io/projected/1eb5fcf6-962f-41f5-a438-c345c24454fa-kube-api-access-b27jk\") on node \"crc\" DevicePath \"\"" Oct 13 21:43:09 crc kubenswrapper[4689]: I1013 21:43:09.855523 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1eb5fcf6-962f-41f5-a438-c345c24454fa-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.255289 4689 generic.go:334] "Generic (PLEG): container finished" podID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerID="a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1" exitCode=0 Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.255335 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4xm7" event={"ID":"1eb5fcf6-962f-41f5-a438-c345c24454fa","Type":"ContainerDied","Data":"a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1"} Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.255363 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4xm7" event={"ID":"1eb5fcf6-962f-41f5-a438-c345c24454fa","Type":"ContainerDied","Data":"176c58880093ae9d9179d55992b584835d6f40b0fdfc159ae962c369239a1416"} Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.255395 4689 scope.go:117] "RemoveContainer" containerID="a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.255400 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4xm7" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.287528 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4xm7"] Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.289577 4689 scope.go:117] "RemoveContainer" containerID="3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.296640 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4xm7"] Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.317994 4689 scope.go:117] "RemoveContainer" containerID="5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.381229 4689 scope.go:117] "RemoveContainer" containerID="a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1" Oct 13 21:43:10 crc kubenswrapper[4689]: E1013 21:43:10.382444 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1\": container with ID starting with a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1 not found: ID does not exist" containerID="a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.382513 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1"} err="failed to get container status \"a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1\": rpc error: code = NotFound desc = could not find container \"a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1\": container with ID starting with a893e1bd7afe063aac3c185114a5bfd3ed6db624fb4f2991dc57e8f56ec319d1 not found: ID does not exist" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.382561 4689 scope.go:117] "RemoveContainer" containerID="3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea" Oct 13 21:43:10 crc kubenswrapper[4689]: E1013 21:43:10.383785 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea\": container with ID starting with 3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea not found: ID does not exist" containerID="3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.383837 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea"} err="failed to get container status \"3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea\": rpc error: code = NotFound desc = could not find container \"3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea\": container with ID starting with 3becbc75596650703c1cd35542791af6adc7cef6fc8f20f0df7810d040be15ea not found: ID does not exist" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.383868 4689 scope.go:117] "RemoveContainer" containerID="5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892" Oct 13 21:43:10 crc kubenswrapper[4689]: E1013 21:43:10.384255 4689 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892\": container with ID starting with 5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892 not found: ID does not exist" containerID="5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.384318 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892"} err="failed to get container status \"5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892\": rpc error: code = NotFound desc = could not find container \"5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892\": container with ID starting with 5dc6c009fd6735942d94fb92e2fb0215f8020fef8e21beba61bb8a48bf589892 not found: ID does not exist" Oct 13 21:43:10 crc kubenswrapper[4689]: I1013 21:43:10.868243 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:43:10 crc kubenswrapper[4689]: E1013 21:43:10.869163 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:43:11 crc kubenswrapper[4689]: I1013 21:43:11.884963 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" path="/var/lib/kubelet/pods/1eb5fcf6-962f-41f5-a438-c345c24454fa/volumes" Oct 13 21:43:21 crc kubenswrapper[4689]: I1013 21:43:21.869191 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:43:21 crc kubenswrapper[4689]: E1013 21:43:21.870395 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:43:32 crc kubenswrapper[4689]: I1013 21:43:32.868809 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:43:32 crc kubenswrapper[4689]: E1013 21:43:32.869657 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:43:43 crc kubenswrapper[4689]: I1013 21:43:43.873486 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:43:43 crc kubenswrapper[4689]: E1013 21:43:43.874442 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:43:55 crc kubenswrapper[4689]: I1013 21:43:55.868386 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:43:55 crc kubenswrapper[4689]: E1013 21:43:55.869089 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:44:07 crc kubenswrapper[4689]: I1013 21:44:07.867964 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:44:07 crc kubenswrapper[4689]: E1013 21:44:07.868755 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:44:12 crc kubenswrapper[4689]: I1013 21:44:12.936266 4689 generic.go:334] "Generic (PLEG): container finished" podID="0185d029-cb9b-4438-a72a-6616759e267e" containerID="84fe95e47b0e6ba64f1b075e5593bfeeade23c69d67f36b271c1f379cb891d6c" exitCode=0 Oct 13 21:44:12 crc kubenswrapper[4689]: I1013 21:44:12.936368 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" event={"ID":"0185d029-cb9b-4438-a72a-6616759e267e","Type":"ContainerDied","Data":"84fe95e47b0e6ba64f1b075e5593bfeeade23c69d67f36b271c1f379cb891d6c"} Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.436228 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.521685 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-inventory\") pod \"0185d029-cb9b-4438-a72a-6616759e267e\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.522383 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ssh-key\") pod \"0185d029-cb9b-4438-a72a-6616759e267e\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.523200 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ovn-combined-ca-bundle\") pod \"0185d029-cb9b-4438-a72a-6616759e267e\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.523440 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krptr\" (UniqueName: \"kubernetes.io/projected/0185d029-cb9b-4438-a72a-6616759e267e-kube-api-access-krptr\") pod \"0185d029-cb9b-4438-a72a-6616759e267e\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.523769 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0185d029-cb9b-4438-a72a-6616759e267e-ovncontroller-config-0\") pod \"0185d029-cb9b-4438-a72a-6616759e267e\" (UID: \"0185d029-cb9b-4438-a72a-6616759e267e\") " Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.527668 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0185d029-cb9b-4438-a72a-6616759e267e-kube-api-access-krptr" (OuterVolumeSpecName: "kube-api-access-krptr") pod "0185d029-cb9b-4438-a72a-6616759e267e" (UID: "0185d029-cb9b-4438-a72a-6616759e267e"). InnerVolumeSpecName "kube-api-access-krptr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.527684 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "0185d029-cb9b-4438-a72a-6616759e267e" (UID: "0185d029-cb9b-4438-a72a-6616759e267e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.553733 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-inventory" (OuterVolumeSpecName: "inventory") pod "0185d029-cb9b-4438-a72a-6616759e267e" (UID: "0185d029-cb9b-4438-a72a-6616759e267e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.556522 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0185d029-cb9b-4438-a72a-6616759e267e" (UID: "0185d029-cb9b-4438-a72a-6616759e267e"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.568195 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0185d029-cb9b-4438-a72a-6616759e267e-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "0185d029-cb9b-4438-a72a-6616759e267e" (UID: "0185d029-cb9b-4438-a72a-6616759e267e"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.626744 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.626780 4689 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.626792 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krptr\" (UniqueName: \"kubernetes.io/projected/0185d029-cb9b-4438-a72a-6616759e267e-kube-api-access-krptr\") on node \"crc\" DevicePath \"\"" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.626801 4689 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0185d029-cb9b-4438-a72a-6616759e267e-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.626809 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0185d029-cb9b-4438-a72a-6616759e267e-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.957243 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" event={"ID":"0185d029-cb9b-4438-a72a-6616759e267e","Type":"ContainerDied","Data":"3197aa0d23076f03d9fa7a7b5cf0b456f9d673855c6de65b6092635a65b2c4ed"} Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.957491 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3197aa0d23076f03d9fa7a7b5cf0b456f9d673855c6de65b6092635a65b2c4ed" Oct 13 21:44:14 crc kubenswrapper[4689]: I1013 21:44:14.957296 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vjk7j" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.060641 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz"] Oct 13 21:44:15 crc kubenswrapper[4689]: E1013 21:44:15.061449 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0185d029-cb9b-4438-a72a-6616759e267e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.061486 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="0185d029-cb9b-4438-a72a-6616759e267e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 13 21:44:15 crc kubenswrapper[4689]: E1013 21:44:15.061528 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="extract-utilities" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.061541 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="extract-utilities" Oct 13 21:44:15 crc kubenswrapper[4689]: E1013 21:44:15.061574 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="extract-content" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.061601 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="extract-content" Oct 13 21:44:15 crc kubenswrapper[4689]: E1013 21:44:15.061627 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="registry-server" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.061637 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="registry-server" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.062120 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="0185d029-cb9b-4438-a72a-6616759e267e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.062182 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb5fcf6-962f-41f5-a438-c345c24454fa" containerName="registry-server" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.063464 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.066158 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.066431 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.066617 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.066768 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.066917 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.067026 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.071118 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz"] Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.145348 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zr26\" (UniqueName: \"kubernetes.io/projected/d7480298-e4a8-4010-a526-9ca1dba08f71-kube-api-access-4zr26\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.145399 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.145446 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.145469 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.145513 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.145557 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.247837 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zr26\" (UniqueName: \"kubernetes.io/projected/d7480298-e4a8-4010-a526-9ca1dba08f71-kube-api-access-4zr26\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.247928 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.247984 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.248013 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.248071 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.248131 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.251968 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.252061 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.252449 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.252779 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.258519 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.264489 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zr26\" (UniqueName: \"kubernetes.io/projected/d7480298-e4a8-4010-a526-9ca1dba08f71-kube-api-access-4zr26\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.388320 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.890102 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz"] Oct 13 21:44:15 crc kubenswrapper[4689]: W1013 21:44:15.896179 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7480298_e4a8_4010_a526_9ca1dba08f71.slice/crio-c8eed196b196d698e6312dbe2b050d8318b868e8bd717b6faa56edc5fe11db9f WatchSource:0}: Error finding container c8eed196b196d698e6312dbe2b050d8318b868e8bd717b6faa56edc5fe11db9f: Status 404 returned error can't find the container with id c8eed196b196d698e6312dbe2b050d8318b868e8bd717b6faa56edc5fe11db9f Oct 13 21:44:15 crc kubenswrapper[4689]: I1013 21:44:15.966224 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" event={"ID":"d7480298-e4a8-4010-a526-9ca1dba08f71","Type":"ContainerStarted","Data":"c8eed196b196d698e6312dbe2b050d8318b868e8bd717b6faa56edc5fe11db9f"} Oct 13 21:44:17 crc kubenswrapper[4689]: I1013 21:44:17.991285 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" event={"ID":"d7480298-e4a8-4010-a526-9ca1dba08f71","Type":"ContainerStarted","Data":"133c3d85d5a7abae27512d5bd5de5e20ebe878a12799ff8a339ea05f579dfd2a"} Oct 13 21:44:18 crc kubenswrapper[4689]: I1013 21:44:18.018820 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" podStartSLOduration=2.059103621 podStartE2EDuration="3.018793964s" podCreationTimestamp="2025-10-13 21:44:15 +0000 UTC" firstStartedPulling="2025-10-13 21:44:15.898191306 +0000 UTC m=+1972.816436391" lastFinishedPulling="2025-10-13 21:44:16.857881649 +0000 UTC m=+1973.776126734" observedRunningTime="2025-10-13 21:44:18.011097162 +0000 UTC m=+1974.929342287" watchObservedRunningTime="2025-10-13 21:44:18.018793964 +0000 UTC m=+1974.937039079" Oct 13 21:44:22 crc kubenswrapper[4689]: I1013 21:44:22.868066 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:44:22 crc kubenswrapper[4689]: E1013 21:44:22.869411 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:44:33 crc kubenswrapper[4689]: I1013 21:44:33.874510 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:44:34 crc kubenswrapper[4689]: I1013 21:44:34.140760 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"abcf8c99b6135e589f4e832721e1f72c81d15fdf824349a4ecf448711e1087f4"} Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.182143 4689 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw"] Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.189881 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.192912 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.200501 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.203844 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw"] Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.330365 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8spxg\" (UniqueName: \"kubernetes.io/projected/ee037022-8806-44b2-b9b6-8c0f07d91dfd-kube-api-access-8spxg\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.330530 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ee037022-8806-44b2-b9b6-8c0f07d91dfd-secret-volume\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.330889 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ee037022-8806-44b2-b9b6-8c0f07d91dfd-config-volume\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.433474 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ee037022-8806-44b2-b9b6-8c0f07d91dfd-config-volume\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.433923 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8spxg\" (UniqueName: \"kubernetes.io/projected/ee037022-8806-44b2-b9b6-8c0f07d91dfd-kube-api-access-8spxg\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.433978 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ee037022-8806-44b2-b9b6-8c0f07d91dfd-secret-volume\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.435077 
4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ee037022-8806-44b2-b9b6-8c0f07d91dfd-config-volume\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.448673 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ee037022-8806-44b2-b9b6-8c0f07d91dfd-secret-volume\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.452935 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8spxg\" (UniqueName: \"kubernetes.io/projected/ee037022-8806-44b2-b9b6-8c0f07d91dfd-kube-api-access-8spxg\") pod \"collect-profiles-29339865-6xrtw\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.525534 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:00 crc kubenswrapper[4689]: I1013 21:45:00.957344 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw"] Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.001750 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9fwhg"] Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.003784 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.027182 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9fwhg"] Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.148799 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cgkp\" (UniqueName: \"kubernetes.io/projected/47320446-450b-4f65-8b40-339eb2e6ab37-kube-api-access-7cgkp\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.149208 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-catalog-content\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.149328 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-utilities\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.251532 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-utilities\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.251714 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cgkp\" (UniqueName: \"kubernetes.io/projected/47320446-450b-4f65-8b40-339eb2e6ab37-kube-api-access-7cgkp\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.251755 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-catalog-content\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.252145 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-utilities\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.252171 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-catalog-content\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.277457 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7cgkp\" (UniqueName: \"kubernetes.io/projected/47320446-450b-4f65-8b40-339eb2e6ab37-kube-api-access-7cgkp\") pod \"redhat-operators-9fwhg\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.343597 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.443419 4689 generic.go:334] "Generic (PLEG): container finished" podID="ee037022-8806-44b2-b9b6-8c0f07d91dfd" containerID="fea54e4819d21ca097bf468682ee97f57ed3909467532b2d5d2ceebec2f7c0de" exitCode=0 Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.443473 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" event={"ID":"ee037022-8806-44b2-b9b6-8c0f07d91dfd","Type":"ContainerDied","Data":"fea54e4819d21ca097bf468682ee97f57ed3909467532b2d5d2ceebec2f7c0de"} Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.443503 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" event={"ID":"ee037022-8806-44b2-b9b6-8c0f07d91dfd","Type":"ContainerStarted","Data":"0cde15e427d06bd426a2c40e9d0e592d8659d25277d2ef2e814524c8918042d7"} Oct 13 21:45:01 crc kubenswrapper[4689]: I1013 21:45:01.793032 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9fwhg"] Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.452304 4689 generic.go:334] "Generic (PLEG): container finished" podID="47320446-450b-4f65-8b40-339eb2e6ab37" containerID="259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae" exitCode=0 Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.452403 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9fwhg" event={"ID":"47320446-450b-4f65-8b40-339eb2e6ab37","Type":"ContainerDied","Data":"259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae"} Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.452605 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9fwhg" event={"ID":"47320446-450b-4f65-8b40-339eb2e6ab37","Type":"ContainerStarted","Data":"70271e971b403445d30a1043f01bc257819491743bd0b24b011979305eae7282"} Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.811878 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.989066 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ee037022-8806-44b2-b9b6-8c0f07d91dfd-config-volume\") pod \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.989485 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ee037022-8806-44b2-b9b6-8c0f07d91dfd-secret-volume\") pod \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.989539 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8spxg\" (UniqueName: \"kubernetes.io/projected/ee037022-8806-44b2-b9b6-8c0f07d91dfd-kube-api-access-8spxg\") pod \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\" (UID: \"ee037022-8806-44b2-b9b6-8c0f07d91dfd\") " Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.991274 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee037022-8806-44b2-b9b6-8c0f07d91dfd-config-volume" (OuterVolumeSpecName: "config-volume") pod "ee037022-8806-44b2-b9b6-8c0f07d91dfd" (UID: "ee037022-8806-44b2-b9b6-8c0f07d91dfd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.996028 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee037022-8806-44b2-b9b6-8c0f07d91dfd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ee037022-8806-44b2-b9b6-8c0f07d91dfd" (UID: "ee037022-8806-44b2-b9b6-8c0f07d91dfd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:45:02 crc kubenswrapper[4689]: I1013 21:45:02.997918 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee037022-8806-44b2-b9b6-8c0f07d91dfd-kube-api-access-8spxg" (OuterVolumeSpecName: "kube-api-access-8spxg") pod "ee037022-8806-44b2-b9b6-8c0f07d91dfd" (UID: "ee037022-8806-44b2-b9b6-8c0f07d91dfd"). InnerVolumeSpecName "kube-api-access-8spxg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.091447 4689 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ee037022-8806-44b2-b9b6-8c0f07d91dfd-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.091480 4689 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ee037022-8806-44b2-b9b6-8c0f07d91dfd-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.091490 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8spxg\" (UniqueName: \"kubernetes.io/projected/ee037022-8806-44b2-b9b6-8c0f07d91dfd-kube-api-access-8spxg\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.468914 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" event={"ID":"ee037022-8806-44b2-b9b6-8c0f07d91dfd","Type":"ContainerDied","Data":"0cde15e427d06bd426a2c40e9d0e592d8659d25277d2ef2e814524c8918042d7"} Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.468962 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cde15e427d06bd426a2c40e9d0e592d8659d25277d2ef2e814524c8918042d7" Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.469025 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339865-6xrtw" Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.904782 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj"] Oct 13 21:45:03 crc kubenswrapper[4689]: I1013 21:45:03.914182 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339820-8wtvj"] Oct 13 21:45:04 crc kubenswrapper[4689]: I1013 21:45:04.479939 4689 generic.go:334] "Generic (PLEG): container finished" podID="47320446-450b-4f65-8b40-339eb2e6ab37" containerID="0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f" exitCode=0 Oct 13 21:45:04 crc kubenswrapper[4689]: I1013 21:45:04.479982 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9fwhg" event={"ID":"47320446-450b-4f65-8b40-339eb2e6ab37","Type":"ContainerDied","Data":"0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f"} Oct 13 21:45:05 crc kubenswrapper[4689]: I1013 21:45:05.491600 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9fwhg" event={"ID":"47320446-450b-4f65-8b40-339eb2e6ab37","Type":"ContainerStarted","Data":"f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41"} Oct 13 21:45:05 crc kubenswrapper[4689]: I1013 21:45:05.514486 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9fwhg" podStartSLOduration=3.03921343 podStartE2EDuration="5.514470437s" podCreationTimestamp="2025-10-13 21:45:00 +0000 UTC" firstStartedPulling="2025-10-13 21:45:02.453906796 +0000 UTC m=+2019.372151881" lastFinishedPulling="2025-10-13 21:45:04.929163793 +0000 UTC m=+2021.847408888" observedRunningTime="2025-10-13 21:45:05.51205219 +0000 UTC m=+2022.430297275" watchObservedRunningTime="2025-10-13 21:45:05.514470437 +0000 UTC 
m=+2022.432715522" Oct 13 21:45:05 crc kubenswrapper[4689]: I1013 21:45:05.891904 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c53fad-57ac-4c62-86aa-f73e4e35b1f4" path="/var/lib/kubelet/pods/62c53fad-57ac-4c62-86aa-f73e4e35b1f4/volumes" Oct 13 21:45:11 crc kubenswrapper[4689]: I1013 21:45:11.343796 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:11 crc kubenswrapper[4689]: I1013 21:45:11.344508 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:11 crc kubenswrapper[4689]: I1013 21:45:11.396802 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:11 crc kubenswrapper[4689]: I1013 21:45:11.555131 4689 generic.go:334] "Generic (PLEG): container finished" podID="d7480298-e4a8-4010-a526-9ca1dba08f71" containerID="133c3d85d5a7abae27512d5bd5de5e20ebe878a12799ff8a339ea05f579dfd2a" exitCode=0 Oct 13 21:45:11 crc kubenswrapper[4689]: I1013 21:45:11.555234 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" event={"ID":"d7480298-e4a8-4010-a526-9ca1dba08f71","Type":"ContainerDied","Data":"133c3d85d5a7abae27512d5bd5de5e20ebe878a12799ff8a339ea05f579dfd2a"} Oct 13 21:45:11 crc kubenswrapper[4689]: I1013 21:45:11.624266 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:11 crc kubenswrapper[4689]: I1013 21:45:11.691811 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9fwhg"] Oct 13 21:45:12 crc kubenswrapper[4689]: I1013 21:45:12.938461 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.105760 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-ovn-metadata-agent-neutron-config-0\") pod \"d7480298-e4a8-4010-a526-9ca1dba08f71\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.105827 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zr26\" (UniqueName: \"kubernetes.io/projected/d7480298-e4a8-4010-a526-9ca1dba08f71-kube-api-access-4zr26\") pod \"d7480298-e4a8-4010-a526-9ca1dba08f71\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.105864 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-inventory\") pod \"d7480298-e4a8-4010-a526-9ca1dba08f71\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.105945 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-ssh-key\") pod \"d7480298-e4a8-4010-a526-9ca1dba08f71\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.106012 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-metadata-combined-ca-bundle\") pod \"d7480298-e4a8-4010-a526-9ca1dba08f71\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.106059 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-nova-metadata-neutron-config-0\") pod \"d7480298-e4a8-4010-a526-9ca1dba08f71\" (UID: \"d7480298-e4a8-4010-a526-9ca1dba08f71\") " Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.112686 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7480298-e4a8-4010-a526-9ca1dba08f71-kube-api-access-4zr26" (OuterVolumeSpecName: "kube-api-access-4zr26") pod "d7480298-e4a8-4010-a526-9ca1dba08f71" (UID: "d7480298-e4a8-4010-a526-9ca1dba08f71"). InnerVolumeSpecName "kube-api-access-4zr26". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.115953 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "d7480298-e4a8-4010-a526-9ca1dba08f71" (UID: "d7480298-e4a8-4010-a526-9ca1dba08f71"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.142200 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "d7480298-e4a8-4010-a526-9ca1dba08f71" (UID: "d7480298-e4a8-4010-a526-9ca1dba08f71"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.142576 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d7480298-e4a8-4010-a526-9ca1dba08f71" (UID: "d7480298-e4a8-4010-a526-9ca1dba08f71"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.143752 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-inventory" (OuterVolumeSpecName: "inventory") pod "d7480298-e4a8-4010-a526-9ca1dba08f71" (UID: "d7480298-e4a8-4010-a526-9ca1dba08f71"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.144273 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "d7480298-e4a8-4010-a526-9ca1dba08f71" (UID: "d7480298-e4a8-4010-a526-9ca1dba08f71"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.208075 4689 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.208113 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zr26\" (UniqueName: \"kubernetes.io/projected/d7480298-e4a8-4010-a526-9ca1dba08f71-kube-api-access-4zr26\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.208126 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.208134 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.208144 4689 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.208156 4689 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7480298-e4a8-4010-a526-9ca1dba08f71-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.577648 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.578064 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz" event={"ID":"d7480298-e4a8-4010-a526-9ca1dba08f71","Type":"ContainerDied","Data":"c8eed196b196d698e6312dbe2b050d8318b868e8bd717b6faa56edc5fe11db9f"} Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.578108 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8eed196b196d698e6312dbe2b050d8318b868e8bd717b6faa56edc5fe11db9f" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.577800 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9fwhg" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="registry-server" containerID="cri-o://f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41" gracePeriod=2 Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.730671 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c"] Oct 13 21:45:13 crc kubenswrapper[4689]: E1013 21:45:13.731222 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7480298-e4a8-4010-a526-9ca1dba08f71" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.731259 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7480298-e4a8-4010-a526-9ca1dba08f71" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 13 21:45:13 crc kubenswrapper[4689]: E1013 21:45:13.731287 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee037022-8806-44b2-b9b6-8c0f07d91dfd" containerName="collect-profiles" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.731309 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee037022-8806-44b2-b9b6-8c0f07d91dfd" containerName="collect-profiles" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.731707 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7480298-e4a8-4010-a526-9ca1dba08f71" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.731731 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee037022-8806-44b2-b9b6-8c0f07d91dfd" containerName="collect-profiles" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.732565 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.734950 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.735157 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.735527 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.735727 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.737440 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.743033 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c"] Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.921718 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.921803 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.921835 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.921875 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brw4h\" (UniqueName: \"kubernetes.io/projected/5c27fced-a27b-4b4f-bc40-cdcb566eb633-kube-api-access-brw4h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:13 crc kubenswrapper[4689]: I1013 21:45:13.921901 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.023530 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.024747 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.024875 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.025123 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brw4h\" (UniqueName: \"kubernetes.io/projected/5c27fced-a27b-4b4f-bc40-cdcb566eb633-kube-api-access-brw4h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.025323 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.029406 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.029685 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.032219 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.032612 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.041575 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brw4h\" (UniqueName: \"kubernetes.io/projected/5c27fced-a27b-4b4f-bc40-cdcb566eb633-kube-api-access-brw4h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.099577 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.110187 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.230976 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-catalog-content\") pod \"47320446-450b-4f65-8b40-339eb2e6ab37\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.231391 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-utilities\") pod \"47320446-450b-4f65-8b40-339eb2e6ab37\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.231458 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cgkp\" (UniqueName: \"kubernetes.io/projected/47320446-450b-4f65-8b40-339eb2e6ab37-kube-api-access-7cgkp\") pod \"47320446-450b-4f65-8b40-339eb2e6ab37\" (UID: \"47320446-450b-4f65-8b40-339eb2e6ab37\") " Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.233801 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-utilities" (OuterVolumeSpecName: "utilities") pod "47320446-450b-4f65-8b40-339eb2e6ab37" (UID: "47320446-450b-4f65-8b40-339eb2e6ab37"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.236780 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47320446-450b-4f65-8b40-339eb2e6ab37-kube-api-access-7cgkp" (OuterVolumeSpecName: "kube-api-access-7cgkp") pod "47320446-450b-4f65-8b40-339eb2e6ab37" (UID: "47320446-450b-4f65-8b40-339eb2e6ab37"). InnerVolumeSpecName "kube-api-access-7cgkp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.334217 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.334248 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cgkp\" (UniqueName: \"kubernetes.io/projected/47320446-450b-4f65-8b40-339eb2e6ab37-kube-api-access-7cgkp\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.587192 4689 generic.go:334] "Generic (PLEG): container finished" podID="47320446-450b-4f65-8b40-339eb2e6ab37" containerID="f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41" exitCode=0 Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.587242 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9fwhg" event={"ID":"47320446-450b-4f65-8b40-339eb2e6ab37","Type":"ContainerDied","Data":"f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41"} Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.587275 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9fwhg" event={"ID":"47320446-450b-4f65-8b40-339eb2e6ab37","Type":"ContainerDied","Data":"70271e971b403445d30a1043f01bc257819491743bd0b24b011979305eae7282"} Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.587294 4689 scope.go:117] "RemoveContainer" containerID="f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.587479 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9fwhg" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.601964 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c"] Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.610337 4689 scope.go:117] "RemoveContainer" containerID="0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.633670 4689 scope.go:117] "RemoveContainer" containerID="259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.655098 4689 scope.go:117] "RemoveContainer" containerID="f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41" Oct 13 21:45:14 crc kubenswrapper[4689]: E1013 21:45:14.655536 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41\": container with ID starting with f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41 not found: ID does not exist" containerID="f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.655572 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41"} err="failed to get container status \"f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41\": rpc error: code = NotFound desc = could not find container \"f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41\": container with ID starting with f89d8969fc62736522e2e92ccc88546658fe2381d014cdc5f594a8bf8aae8c41 not found: ID does not exist" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.655606 4689 scope.go:117] "RemoveContainer" containerID="0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f" Oct 13 21:45:14 crc kubenswrapper[4689]: E1013 21:45:14.655874 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f\": container with ID starting with 0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f not found: ID does not exist" containerID="0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.655896 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f"} err="failed to get container status \"0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f\": rpc error: code = NotFound desc = could not find container \"0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f\": container with ID starting with 0c4d86abd8e89163ba0e8e844e66bbb352e84d9b2f16159bf2a05deafae56c6f not found: ID does not exist" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.655911 4689 scope.go:117] "RemoveContainer" containerID="259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae" Oct 13 21:45:14 crc kubenswrapper[4689]: E1013 21:45:14.656305 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae\": container with ID 
starting with 259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae not found: ID does not exist" containerID="259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.656348 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae"} err="failed to get container status \"259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae\": rpc error: code = NotFound desc = could not find container \"259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae\": container with ID starting with 259da1631f7b88ddb05c812f072c1ee2ab277d69b0cdf4d7d1171c42972959ae not found: ID does not exist" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.772531 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47320446-450b-4f65-8b40-339eb2e6ab37" (UID: "47320446-450b-4f65-8b40-339eb2e6ab37"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.841569 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47320446-450b-4f65-8b40-339eb2e6ab37-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.946290 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9fwhg"] Oct 13 21:45:14 crc kubenswrapper[4689]: I1013 21:45:14.955321 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9fwhg"] Oct 13 21:45:15 crc kubenswrapper[4689]: I1013 21:45:15.603923 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" event={"ID":"5c27fced-a27b-4b4f-bc40-cdcb566eb633","Type":"ContainerStarted","Data":"d01d47c26ffd0a5fc2ea0a42bddd08908f4d142cc252e0f8b5508f2a4919b5d8"} Oct 13 21:45:15 crc kubenswrapper[4689]: I1013 21:45:15.603971 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" event={"ID":"5c27fced-a27b-4b4f-bc40-cdcb566eb633","Type":"ContainerStarted","Data":"e3544b272d998c7432e86c51516b47af598ed591968e0bf6f8f980c8fd479089"} Oct 13 21:45:15 crc kubenswrapper[4689]: I1013 21:45:15.626274 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" podStartSLOduration=2.157898673 podStartE2EDuration="2.62625647s" podCreationTimestamp="2025-10-13 21:45:13 +0000 UTC" firstStartedPulling="2025-10-13 21:45:14.610426377 +0000 UTC m=+2031.528671462" lastFinishedPulling="2025-10-13 21:45:15.078784184 +0000 UTC m=+2031.997029259" observedRunningTime="2025-10-13 21:45:15.623203098 +0000 UTC m=+2032.541448183" watchObservedRunningTime="2025-10-13 21:45:15.62625647 +0000 UTC m=+2032.544501545" Oct 13 21:45:15 crc kubenswrapper[4689]: I1013 21:45:15.880973 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" path="/var/lib/kubelet/pods/47320446-450b-4f65-8b40-339eb2e6ab37/volumes" Oct 13 21:45:27 crc kubenswrapper[4689]: I1013 21:45:27.741482 4689 scope.go:117] "RemoveContainer" 
containerID="cbf69b2aebc182bb9237fb099da4904c439d49bc380d5c3ff2770472e256db07" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.695315 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d4fwz"] Oct 13 21:46:22 crc kubenswrapper[4689]: E1013 21:46:22.696322 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="registry-server" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.696336 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="registry-server" Oct 13 21:46:22 crc kubenswrapper[4689]: E1013 21:46:22.696350 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="extract-utilities" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.696357 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="extract-utilities" Oct 13 21:46:22 crc kubenswrapper[4689]: E1013 21:46:22.696372 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="extract-content" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.696379 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="extract-content" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.696556 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="47320446-450b-4f65-8b40-339eb2e6ab37" containerName="registry-server" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.697935 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.707772 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d4fwz"] Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.758757 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-catalog-content\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.758812 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps9wz\" (UniqueName: \"kubernetes.io/projected/b112f721-5e09-4592-9ffe-7514d003604a-kube-api-access-ps9wz\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.758957 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-utilities\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.860741 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-catalog-content\") pod \"community-operators-d4fwz\" 
(UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.860791 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps9wz\" (UniqueName: \"kubernetes.io/projected/b112f721-5e09-4592-9ffe-7514d003604a-kube-api-access-ps9wz\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.860831 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-utilities\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.861253 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-catalog-content\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.861300 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-utilities\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:22 crc kubenswrapper[4689]: I1013 21:46:22.884251 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps9wz\" (UniqueName: \"kubernetes.io/projected/b112f721-5e09-4592-9ffe-7514d003604a-kube-api-access-ps9wz\") pod \"community-operators-d4fwz\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:23 crc kubenswrapper[4689]: I1013 21:46:23.071385 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:23 crc kubenswrapper[4689]: I1013 21:46:23.599332 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d4fwz"] Oct 13 21:46:24 crc kubenswrapper[4689]: I1013 21:46:24.193946 4689 generic.go:334] "Generic (PLEG): container finished" podID="b112f721-5e09-4592-9ffe-7514d003604a" containerID="2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db" exitCode=0 Oct 13 21:46:24 crc kubenswrapper[4689]: I1013 21:46:24.194017 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4fwz" event={"ID":"b112f721-5e09-4592-9ffe-7514d003604a","Type":"ContainerDied","Data":"2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db"} Oct 13 21:46:24 crc kubenswrapper[4689]: I1013 21:46:24.194304 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4fwz" event={"ID":"b112f721-5e09-4592-9ffe-7514d003604a","Type":"ContainerStarted","Data":"852c6fab3fd353e28cce405f706acd105711d9c3485355d0eb85bfa51f138810"} Oct 13 21:46:26 crc kubenswrapper[4689]: I1013 21:46:26.215047 4689 generic.go:334] "Generic (PLEG): container finished" podID="b112f721-5e09-4592-9ffe-7514d003604a" containerID="31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216" exitCode=0 Oct 13 21:46:26 crc kubenswrapper[4689]: I1013 21:46:26.215294 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4fwz" event={"ID":"b112f721-5e09-4592-9ffe-7514d003604a","Type":"ContainerDied","Data":"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216"} Oct 13 21:46:27 crc kubenswrapper[4689]: I1013 21:46:27.226627 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4fwz" event={"ID":"b112f721-5e09-4592-9ffe-7514d003604a","Type":"ContainerStarted","Data":"8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3"} Oct 13 21:46:27 crc kubenswrapper[4689]: I1013 21:46:27.249973 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d4fwz" podStartSLOduration=2.7259539029999997 podStartE2EDuration="5.249932557s" podCreationTimestamp="2025-10-13 21:46:22 +0000 UTC" firstStartedPulling="2025-10-13 21:46:24.195415459 +0000 UTC m=+2101.113660544" lastFinishedPulling="2025-10-13 21:46:26.719394113 +0000 UTC m=+2103.637639198" observedRunningTime="2025-10-13 21:46:27.247175822 +0000 UTC m=+2104.165420907" watchObservedRunningTime="2025-10-13 21:46:27.249932557 +0000 UTC m=+2104.168177642" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.072262 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.072615 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.116503 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.320926 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.358111 4689 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/certified-operators-2nw8m"] Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.360510 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.371386 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2nw8m"] Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.389264 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-utilities\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.389705 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmb6j\" (UniqueName: \"kubernetes.io/projected/868caa9e-679e-4354-a046-52f93d9b728a-kube-api-access-nmb6j\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.389878 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-catalog-content\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.491479 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmb6j\" (UniqueName: \"kubernetes.io/projected/868caa9e-679e-4354-a046-52f93d9b728a-kube-api-access-nmb6j\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.491916 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-catalog-content\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.492004 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-utilities\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.492327 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-catalog-content\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.492385 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-utilities\") pod \"certified-operators-2nw8m\" (UID: 
\"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.513492 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmb6j\" (UniqueName: \"kubernetes.io/projected/868caa9e-679e-4354-a046-52f93d9b728a-kube-api-access-nmb6j\") pod \"certified-operators-2nw8m\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") " pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:33 crc kubenswrapper[4689]: I1013 21:46:33.683573 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2nw8m" Oct 13 21:46:34 crc kubenswrapper[4689]: I1013 21:46:34.189902 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2nw8m"] Oct 13 21:46:34 crc kubenswrapper[4689]: I1013 21:46:34.288369 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2nw8m" event={"ID":"868caa9e-679e-4354-a046-52f93d9b728a","Type":"ContainerStarted","Data":"c6a07d5d15544bb0709a4b5e3a26cc4f1a49df95cc835e38e4f8eb463115e74a"} Oct 13 21:46:35 crc kubenswrapper[4689]: I1013 21:46:35.301054 4689 generic.go:334] "Generic (PLEG): container finished" podID="868caa9e-679e-4354-a046-52f93d9b728a" containerID="b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9" exitCode=0 Oct 13 21:46:35 crc kubenswrapper[4689]: I1013 21:46:35.301101 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2nw8m" event={"ID":"868caa9e-679e-4354-a046-52f93d9b728a","Type":"ContainerDied","Data":"b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9"} Oct 13 21:46:35 crc kubenswrapper[4689]: I1013 21:46:35.304545 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 21:46:35 crc kubenswrapper[4689]: I1013 21:46:35.548828 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d4fwz"] Oct 13 21:46:35 crc kubenswrapper[4689]: I1013 21:46:35.549295 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d4fwz" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="registry-server" containerID="cri-o://8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3" gracePeriod=2 Oct 13 21:46:35 crc kubenswrapper[4689]: I1013 21:46:35.986757 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.040132 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-utilities\") pod \"b112f721-5e09-4592-9ffe-7514d003604a\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.040213 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps9wz\" (UniqueName: \"kubernetes.io/projected/b112f721-5e09-4592-9ffe-7514d003604a-kube-api-access-ps9wz\") pod \"b112f721-5e09-4592-9ffe-7514d003604a\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.040277 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-catalog-content\") pod \"b112f721-5e09-4592-9ffe-7514d003604a\" (UID: \"b112f721-5e09-4592-9ffe-7514d003604a\") " Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.041138 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-utilities" (OuterVolumeSpecName: "utilities") pod "b112f721-5e09-4592-9ffe-7514d003604a" (UID: "b112f721-5e09-4592-9ffe-7514d003604a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.047483 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b112f721-5e09-4592-9ffe-7514d003604a-kube-api-access-ps9wz" (OuterVolumeSpecName: "kube-api-access-ps9wz") pod "b112f721-5e09-4592-9ffe-7514d003604a" (UID: "b112f721-5e09-4592-9ffe-7514d003604a"). InnerVolumeSpecName "kube-api-access-ps9wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.111170 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b112f721-5e09-4592-9ffe-7514d003604a" (UID: "b112f721-5e09-4592-9ffe-7514d003604a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.142240 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.142278 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b112f721-5e09-4592-9ffe-7514d003604a-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.142290 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps9wz\" (UniqueName: \"kubernetes.io/projected/b112f721-5e09-4592-9ffe-7514d003604a-kube-api-access-ps9wz\") on node \"crc\" DevicePath \"\"" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.311682 4689 generic.go:334] "Generic (PLEG): container finished" podID="b112f721-5e09-4592-9ffe-7514d003604a" containerID="8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3" exitCode=0 Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.311769 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4fwz" event={"ID":"b112f721-5e09-4592-9ffe-7514d003604a","Type":"ContainerDied","Data":"8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3"} Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.311802 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d4fwz" event={"ID":"b112f721-5e09-4592-9ffe-7514d003604a","Type":"ContainerDied","Data":"852c6fab3fd353e28cce405f706acd105711d9c3485355d0eb85bfa51f138810"} Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.311822 4689 scope.go:117] "RemoveContainer" containerID="8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.313362 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d4fwz" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.313853 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2nw8m" event={"ID":"868caa9e-679e-4354-a046-52f93d9b728a","Type":"ContainerStarted","Data":"1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846"} Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.335458 4689 scope.go:117] "RemoveContainer" containerID="31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.352595 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d4fwz"] Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.361183 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d4fwz"] Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.384789 4689 scope.go:117] "RemoveContainer" containerID="2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.401067 4689 scope.go:117] "RemoveContainer" containerID="8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3" Oct 13 21:46:36 crc kubenswrapper[4689]: E1013 21:46:36.401406 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3\": container with ID starting with 8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3 not found: ID does not exist" containerID="8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.401435 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3"} err="failed to get container status \"8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3\": rpc error: code = NotFound desc = could not find container \"8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3\": container with ID starting with 8087a9ef92a3cf50000cf1184c8b45af3ee404d662932d2deb1bd57f7984b1a3 not found: ID does not exist" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.401456 4689 scope.go:117] "RemoveContainer" containerID="31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216" Oct 13 21:46:36 crc kubenswrapper[4689]: E1013 21:46:36.401866 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216\": container with ID starting with 31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216 not found: ID does not exist" containerID="31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216" Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.401909 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216"} err="failed to get container status \"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216\": rpc error: code = NotFound desc = could not find container \"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216\": container with ID starting with 31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216 not 
Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.401909 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216"} err="failed to get container status \"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216\": rpc error: code = NotFound desc = could not find container \"31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216\": container with ID starting with 31d1631ef47957c3e74b195e0895d3c5912376087172c795c4b77a0c58ecf216 not found: ID does not exist"
Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.401942 4689 scope.go:117] "RemoveContainer" containerID="2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db"
Oct 13 21:46:36 crc kubenswrapper[4689]: E1013 21:46:36.402266 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db\": container with ID starting with 2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db not found: ID does not exist" containerID="2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db"
Oct 13 21:46:36 crc kubenswrapper[4689]: I1013 21:46:36.402296 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db"} err="failed to get container status \"2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db\": rpc error: code = NotFound desc = could not find container \"2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db\": container with ID starting with 2dabeaa3f251a6d5ce03d94a934787719bf9a57ad56ff760c4a131d0d602a6db not found: ID does not exist"
Oct 13 21:46:37 crc kubenswrapper[4689]: I1013 21:46:37.326826 4689 generic.go:334] "Generic (PLEG): container finished" podID="868caa9e-679e-4354-a046-52f93d9b728a" containerID="1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846" exitCode=0
Oct 13 21:46:37 crc kubenswrapper[4689]: I1013 21:46:37.327068 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2nw8m" event={"ID":"868caa9e-679e-4354-a046-52f93d9b728a","Type":"ContainerDied","Data":"1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846"}
Oct 13 21:46:37 crc kubenswrapper[4689]: I1013 21:46:37.878810 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b112f721-5e09-4592-9ffe-7514d003604a" path="/var/lib/kubelet/pods/b112f721-5e09-4592-9ffe-7514d003604a/volumes"
Oct 13 21:46:38 crc kubenswrapper[4689]: I1013 21:46:38.337977 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2nw8m" event={"ID":"868caa9e-679e-4354-a046-52f93d9b728a","Type":"ContainerStarted","Data":"9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c"}
Oct 13 21:46:38 crc kubenswrapper[4689]: I1013 21:46:38.356937 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2nw8m" podStartSLOduration=2.927688411 podStartE2EDuration="5.356921246s" podCreationTimestamp="2025-10-13 21:46:33 +0000 UTC" firstStartedPulling="2025-10-13 21:46:35.304255782 +0000 UTC m=+2112.222500867" lastFinishedPulling="2025-10-13 21:46:37.733488607 +0000 UTC m=+2114.651733702" observedRunningTime="2025-10-13 21:46:38.354010607 +0000 UTC m=+2115.272255692" watchObservedRunningTime="2025-10-13 21:46:38.356921246 +0000 UTC m=+2115.275166331"
Oct 13 21:46:43 crc kubenswrapper[4689]: I1013 21:46:43.684061 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2nw8m"
Oct 13 21:46:43 crc kubenswrapper[4689]: I1013 21:46:43.684689 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2nw8m"
Oct 13 21:46:43 crc kubenswrapper[4689]: I1013 21:46:43.735879 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2nw8m"
Oct 13 21:46:44 crc kubenswrapper[4689]: I1013 21:46:44.478066 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2nw8m"
Oct 13 21:46:44 crc kubenswrapper[4689]: I1013 21:46:44.537242 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2nw8m"]
Oct 13 21:46:46 crc kubenswrapper[4689]: I1013 21:46:46.444293 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2nw8m" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="registry-server" containerID="cri-o://9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c" gracePeriod=2
Oct 13 21:46:46 crc kubenswrapper[4689]: I1013 21:46:46.886763 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2nw8m"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.048205 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-catalog-content\") pod \"868caa9e-679e-4354-a046-52f93d9b728a\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") "
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.048301 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-utilities\") pod \"868caa9e-679e-4354-a046-52f93d9b728a\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") "
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.048416 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmb6j\" (UniqueName: \"kubernetes.io/projected/868caa9e-679e-4354-a046-52f93d9b728a-kube-api-access-nmb6j\") pod \"868caa9e-679e-4354-a046-52f93d9b728a\" (UID: \"868caa9e-679e-4354-a046-52f93d9b728a\") "
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.049115 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-utilities" (OuterVolumeSpecName: "utilities") pod "868caa9e-679e-4354-a046-52f93d9b728a" (UID: "868caa9e-679e-4354-a046-52f93d9b728a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.060183 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/868caa9e-679e-4354-a046-52f93d9b728a-kube-api-access-nmb6j" (OuterVolumeSpecName: "kube-api-access-nmb6j") pod "868caa9e-679e-4354-a046-52f93d9b728a" (UID: "868caa9e-679e-4354-a046-52f93d9b728a"). InnerVolumeSpecName "kube-api-access-nmb6j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.105868 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "868caa9e-679e-4354-a046-52f93d9b728a" (UID: "868caa9e-679e-4354-a046-52f93d9b728a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.150516 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.150899 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868caa9e-679e-4354-a046-52f93d9b728a-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.150945 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmb6j\" (UniqueName: \"kubernetes.io/projected/868caa9e-679e-4354-a046-52f93d9b728a-kube-api-access-nmb6j\") on node \"crc\" DevicePath \"\""
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.455977 4689 generic.go:334] "Generic (PLEG): container finished" podID="868caa9e-679e-4354-a046-52f93d9b728a" containerID="9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c" exitCode=0
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.456034 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2nw8m"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.456035 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2nw8m" event={"ID":"868caa9e-679e-4354-a046-52f93d9b728a","Type":"ContainerDied","Data":"9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c"}
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.456100 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2nw8m" event={"ID":"868caa9e-679e-4354-a046-52f93d9b728a","Type":"ContainerDied","Data":"c6a07d5d15544bb0709a4b5e3a26cc4f1a49df95cc835e38e4f8eb463115e74a"}
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.456119 4689 scope.go:117] "RemoveContainer" containerID="9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.484827 4689 scope.go:117] "RemoveContainer" containerID="1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.499653 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2nw8m"]
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.506123 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2nw8m"]
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.509571 4689 scope.go:117] "RemoveContainer" containerID="b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.563328 4689 scope.go:117] "RemoveContainer" containerID="9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c"
Oct 13 21:46:47 crc kubenswrapper[4689]: E1013 21:46:47.567321 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c\": container with ID starting with 9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c not found: ID does not exist" containerID="9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.567377 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c"} err="failed to get container status \"9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c\": rpc error: code = NotFound desc = could not find container \"9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c\": container with ID starting with 9c6cde0007df1c5c9066e1ec434ffd40d47728df56146f129a725aa3923bd34c not found: ID does not exist"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.567410 4689 scope.go:117] "RemoveContainer" containerID="1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846"
Oct 13 21:46:47 crc kubenswrapper[4689]: E1013 21:46:47.568065 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846\": container with ID starting with 1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846 not found: ID does not exist" containerID="1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.568120 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846"} err="failed to get container status \"1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846\": rpc error: code = NotFound desc = could not find container \"1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846\": container with ID starting with 1bb052a0fd1ca6300746dafe854ff81f2e82d1811e69015dd655b27438a1b846 not found: ID does not exist"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.568153 4689 scope.go:117] "RemoveContainer" containerID="b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9"
Oct 13 21:46:47 crc kubenswrapper[4689]: E1013 21:46:47.568833 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9\": container with ID starting with b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9 not found: ID does not exist" containerID="b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.568862 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9"} err="failed to get container status \"b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9\": rpc error: code = NotFound desc = could not find container \"b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9\": container with ID starting with b643f5bae612c3d385357750e690a257bd04aa84fad9084e1b679bcbad1f89a9 not found: ID does not exist"
Oct 13 21:46:47 crc kubenswrapper[4689]: I1013 21:46:47.878266 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="868caa9e-679e-4354-a046-52f93d9b728a" path="/var/lib/kubelet/pods/868caa9e-679e-4354-a046-52f93d9b728a/volumes"
Oct 13 21:46:53 crc kubenswrapper[4689]: I1013 21:46:53.858766 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 21:46:53 crc kubenswrapper[4689]: I1013 21:46:53.859228 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 21:47:23 crc kubenswrapper[4689]: I1013 21:47:23.859703 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 21:47:23 crc kubenswrapper[4689]: I1013 21:47:23.860955 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 21:47:53 crc kubenswrapper[4689]: I1013 21:47:53.858876 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 21:47:53 crc kubenswrapper[4689]: I1013 21:47:53.859490 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 21:47:53 crc kubenswrapper[4689]: I1013 21:47:53.859543 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:47:53 crc kubenswrapper[4689]: I1013 21:47:53.860415 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"abcf8c99b6135e589f4e832721e1f72c81d15fdf824349a4ecf448711e1087f4"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 21:47:53 crc kubenswrapper[4689]: I1013 21:47:53.860498 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://abcf8c99b6135e589f4e832721e1f72c81d15fdf824349a4ecf448711e1087f4" gracePeriod=600
Oct 13 21:47:54 crc kubenswrapper[4689]: I1013 21:47:54.099147 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="abcf8c99b6135e589f4e832721e1f72c81d15fdf824349a4ecf448711e1087f4" exitCode=0
event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"abcf8c99b6135e589f4e832721e1f72c81d15fdf824349a4ecf448711e1087f4"} Oct 13 21:47:54 crc kubenswrapper[4689]: I1013 21:47:54.099248 4689 scope.go:117] "RemoveContainer" containerID="553321fe84fbdbc9a32b7c49356d88acaf97983eef941385afaba69010e10737" Oct 13 21:47:55 crc kubenswrapper[4689]: I1013 21:47:55.109512 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027"} Oct 13 21:49:59 crc kubenswrapper[4689]: I1013 21:49:59.228890 4689 generic.go:334] "Generic (PLEG): container finished" podID="5c27fced-a27b-4b4f-bc40-cdcb566eb633" containerID="d01d47c26ffd0a5fc2ea0a42bddd08908f4d142cc252e0f8b5508f2a4919b5d8" exitCode=0 Oct 13 21:49:59 crc kubenswrapper[4689]: I1013 21:49:59.228945 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" event={"ID":"5c27fced-a27b-4b4f-bc40-cdcb566eb633","Type":"ContainerDied","Data":"d01d47c26ffd0a5fc2ea0a42bddd08908f4d142cc252e0f8b5508f2a4919b5d8"} Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.653796 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.741681 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-ssh-key\") pod \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.741777 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-combined-ca-bundle\") pod \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.741828 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-inventory\") pod \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.741895 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brw4h\" (UniqueName: \"kubernetes.io/projected/5c27fced-a27b-4b4f-bc40-cdcb566eb633-kube-api-access-brw4h\") pod \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.742012 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-secret-0\") pod \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\" (UID: \"5c27fced-a27b-4b4f-bc40-cdcb566eb633\") " Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.748224 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod 
"5c27fced-a27b-4b4f-bc40-cdcb566eb633" (UID: "5c27fced-a27b-4b4f-bc40-cdcb566eb633"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.749795 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c27fced-a27b-4b4f-bc40-cdcb566eb633-kube-api-access-brw4h" (OuterVolumeSpecName: "kube-api-access-brw4h") pod "5c27fced-a27b-4b4f-bc40-cdcb566eb633" (UID: "5c27fced-a27b-4b4f-bc40-cdcb566eb633"). InnerVolumeSpecName "kube-api-access-brw4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.769342 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "5c27fced-a27b-4b4f-bc40-cdcb566eb633" (UID: "5c27fced-a27b-4b4f-bc40-cdcb566eb633"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.775284 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-inventory" (OuterVolumeSpecName: "inventory") pod "5c27fced-a27b-4b4f-bc40-cdcb566eb633" (UID: "5c27fced-a27b-4b4f-bc40-cdcb566eb633"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.775422 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5c27fced-a27b-4b4f-bc40-cdcb566eb633" (UID: "5c27fced-a27b-4b4f-bc40-cdcb566eb633"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.844310 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.844354 4689 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.844370 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.844382 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brw4h\" (UniqueName: \"kubernetes.io/projected/5c27fced-a27b-4b4f-bc40-cdcb566eb633-kube-api-access-brw4h\") on node \"crc\" DevicePath \"\"" Oct 13 21:50:00 crc kubenswrapper[4689]: I1013 21:50:00.844396 4689 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5c27fced-a27b-4b4f-bc40-cdcb566eb633-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.247632 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" event={"ID":"5c27fced-a27b-4b4f-bc40-cdcb566eb633","Type":"ContainerDied","Data":"e3544b272d998c7432e86c51516b47af598ed591968e0bf6f8f980c8fd479089"} Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.247681 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3544b272d998c7432e86c51516b47af598ed591968e0bf6f8f980c8fd479089" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.247774 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.323959 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9"] Oct 13 21:50:01 crc kubenswrapper[4689]: E1013 21:50:01.324400 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="extract-content" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324419 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="extract-content" Oct 13 21:50:01 crc kubenswrapper[4689]: E1013 21:50:01.324444 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="extract-utilities" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324452 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="extract-utilities" Oct 13 21:50:01 crc kubenswrapper[4689]: E1013 21:50:01.324475 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="extract-utilities" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324482 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="extract-utilities" Oct 13 21:50:01 crc kubenswrapper[4689]: E1013 21:50:01.324498 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="extract-content" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324506 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="extract-content" Oct 13 21:50:01 crc kubenswrapper[4689]: E1013 21:50:01.324538 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c27fced-a27b-4b4f-bc40-cdcb566eb633" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324547 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c27fced-a27b-4b4f-bc40-cdcb566eb633" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 13 21:50:01 crc kubenswrapper[4689]: E1013 21:50:01.324569 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="registry-server" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324576 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="registry-server" Oct 13 21:50:01 crc kubenswrapper[4689]: E1013 21:50:01.324600 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="registry-server" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324609 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="registry-server" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324829 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="868caa9e-679e-4354-a046-52f93d9b728a" containerName="registry-server" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324851 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c27fced-a27b-4b4f-bc40-cdcb566eb633" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 13 
21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.324861 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="b112f721-5e09-4592-9ffe-7514d003604a" containerName="registry-server" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.325598 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.328267 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.329626 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.331027 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.331174 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.331219 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.331296 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.332054 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.338779 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9"] Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465041 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465105 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdpd4\" (UniqueName: \"kubernetes.io/projected/88053993-c10c-49d4-b69a-82c745001999-kube-api-access-zdpd4\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465129 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/88053993-c10c-49d4-b69a-82c745001999-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465144 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465170 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465232 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465250 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465304 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.465331 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567326 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdpd4\" (UniqueName: \"kubernetes.io/projected/88053993-c10c-49d4-b69a-82c745001999-kube-api-access-zdpd4\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567389 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567411 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/88053993-c10c-49d4-b69a-82c745001999-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: 
\"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567434 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567501 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567516 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567573 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567616 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.567663 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.570692 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/88053993-c10c-49d4-b69a-82c745001999-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.572965 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.574127 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.574188 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.574377 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.574411 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.575009 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.575422 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.583975 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdpd4\" (UniqueName: \"kubernetes.io/projected/88053993-c10c-49d4-b69a-82c745001999-kube-api-access-zdpd4\") pod \"nova-edpm-deployment-openstack-edpm-ipam-jqvm9\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:01 crc kubenswrapper[4689]: I1013 21:50:01.643113 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:50:02 crc kubenswrapper[4689]: I1013 21:50:02.147450 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9"] Oct 13 21:50:02 crc kubenswrapper[4689]: I1013 21:50:02.256553 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" event={"ID":"88053993-c10c-49d4-b69a-82c745001999","Type":"ContainerStarted","Data":"801f181ee9e472e17a92887311ef8f453f2b57324d5eb02f4bb6a34db4ee6525"} Oct 13 21:50:03 crc kubenswrapper[4689]: I1013 21:50:03.267976 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" event={"ID":"88053993-c10c-49d4-b69a-82c745001999","Type":"ContainerStarted","Data":"37dc90ce428717010c7f465a6d56e6f8a13fa931112439c04757a133c99a299b"} Oct 13 21:50:03 crc kubenswrapper[4689]: I1013 21:50:03.288995 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" podStartSLOduration=1.550698372 podStartE2EDuration="2.288974197s" podCreationTimestamp="2025-10-13 21:50:01 +0000 UTC" firstStartedPulling="2025-10-13 21:50:02.151494516 +0000 UTC m=+2319.069739601" lastFinishedPulling="2025-10-13 21:50:02.889770341 +0000 UTC m=+2319.808015426" observedRunningTime="2025-10-13 21:50:03.286837977 +0000 UTC m=+2320.205083062" watchObservedRunningTime="2025-10-13 21:50:03.288974197 +0000 UTC m=+2320.207219292" Oct 13 21:50:23 crc kubenswrapper[4689]: I1013 21:50:23.859509 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:50:23 crc kubenswrapper[4689]: I1013 21:50:23.860445 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:50:53 crc kubenswrapper[4689]: I1013 21:50:53.858532 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:50:53 crc kubenswrapper[4689]: I1013 21:50:53.859198 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:51:23 crc kubenswrapper[4689]: I1013 21:51:23.859262 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 21:51:23 crc kubenswrapper[4689]: I1013 21:51:23.859874 4689 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 21:51:23 crc kubenswrapper[4689]: I1013 21:51:23.859933 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 21:51:23 crc kubenswrapper[4689]: I1013 21:51:23.860742 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 21:51:23 crc kubenswrapper[4689]: I1013 21:51:23.860806 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" gracePeriod=600 Oct 13 21:51:23 crc kubenswrapper[4689]: E1013 21:51:23.980864 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:51:24 crc kubenswrapper[4689]: I1013 21:51:24.988635 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" exitCode=0 Oct 13 21:51:24 crc kubenswrapper[4689]: I1013 21:51:24.988717 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027"} Oct 13 21:51:24 crc kubenswrapper[4689]: I1013 21:51:24.988956 4689 scope.go:117] "RemoveContainer" containerID="abcf8c99b6135e589f4e832721e1f72c81d15fdf824349a4ecf448711e1087f4" Oct 13 21:51:24 crc kubenswrapper[4689]: I1013 21:51:24.989574 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:51:24 crc kubenswrapper[4689]: E1013 21:51:24.989878 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:51:39 crc kubenswrapper[4689]: I1013 21:51:39.867654 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:51:39 crc kubenswrapper[4689]: E1013 21:51:39.868443 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:51:53 crc kubenswrapper[4689]: I1013 21:51:53.874966 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:51:53 crc kubenswrapper[4689]: E1013 21:51:53.875968 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:52:04 crc kubenswrapper[4689]: I1013 21:52:04.868174 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:52:04 crc kubenswrapper[4689]: E1013 21:52:04.869268 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:52:15 crc kubenswrapper[4689]: I1013 21:52:15.868982 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:52:15 crc kubenswrapper[4689]: E1013 21:52:15.870672 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:52:27 crc kubenswrapper[4689]: I1013 21:52:27.867573 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:52:27 crc kubenswrapper[4689]: E1013 21:52:27.868481 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:52:39 crc kubenswrapper[4689]: I1013 21:52:39.867219 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:52:39 crc kubenswrapper[4689]: E1013 21:52:39.867976 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:52:52 crc kubenswrapper[4689]: I1013 21:52:52.867837 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:52:52 crc kubenswrapper[4689]: E1013 21:52:52.868853 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:53:04 crc kubenswrapper[4689]: I1013 21:53:04.866929 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:53:04 crc kubenswrapper[4689]: E1013 21:53:04.867622 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:53:18 crc kubenswrapper[4689]: I1013 21:53:18.867686 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:53:18 crc kubenswrapper[4689]: E1013 21:53:18.868729 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:53:29 crc kubenswrapper[4689]: I1013 21:53:29.867761 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:53:29 crc kubenswrapper[4689]: E1013 21:53:29.868621 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:53:41 crc kubenswrapper[4689]: I1013 21:53:41.868425 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:53:41 crc kubenswrapper[4689]: E1013 21:53:41.869305 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" 
podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.270777 4689 generic.go:334] "Generic (PLEG): container finished" podID="88053993-c10c-49d4-b69a-82c745001999" containerID="37dc90ce428717010c7f465a6d56e6f8a13fa931112439c04757a133c99a299b" exitCode=0 Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.270835 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" event={"ID":"88053993-c10c-49d4-b69a-82c745001999","Type":"ContainerDied","Data":"37dc90ce428717010c7f465a6d56e6f8a13fa931112439c04757a133c99a299b"} Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.352903 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sk6x5"] Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.355182 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.373712 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sk6x5"] Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.486185 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-utilities\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.486506 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-catalog-content\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.486648 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff5rj\" (UniqueName: \"kubernetes.io/projected/ba225260-80d3-43b8-b47b-15ddf15a578c-kube-api-access-ff5rj\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.588181 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-utilities\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.588245 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-catalog-content\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.588338 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff5rj\" (UniqueName: \"kubernetes.io/projected/ba225260-80d3-43b8-b47b-15ddf15a578c-kube-api-access-ff5rj\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " 
pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.588750 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-utilities\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.588834 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-catalog-content\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.606804 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff5rj\" (UniqueName: \"kubernetes.io/projected/ba225260-80d3-43b8-b47b-15ddf15a578c-kube-api-access-ff5rj\") pod \"redhat-marketplace-sk6x5\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:42 crc kubenswrapper[4689]: I1013 21:53:42.676378 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.131887 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sk6x5"] Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.282643 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sk6x5" event={"ID":"ba225260-80d3-43b8-b47b-15ddf15a578c","Type":"ContainerStarted","Data":"a115b2734d78f524f95055aeda3c3aa768ab4b3fa880844bc7f2b7537fc22b65"} Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.670089 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810213 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdpd4\" (UniqueName: \"kubernetes.io/projected/88053993-c10c-49d4-b69a-82c745001999-kube-api-access-zdpd4\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810288 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-ssh-key\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810348 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-0\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810408 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-1\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810435 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-combined-ca-bundle\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810512 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-0\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810669 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-1\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810724 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-inventory\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.810758 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/88053993-c10c-49d4-b69a-82c745001999-nova-extra-config-0\") pod \"88053993-c10c-49d4-b69a-82c745001999\" (UID: \"88053993-c10c-49d4-b69a-82c745001999\") " Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.817233 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/88053993-c10c-49d4-b69a-82c745001999-kube-api-access-zdpd4" (OuterVolumeSpecName: "kube-api-access-zdpd4") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "kube-api-access-zdpd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.837625 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.839808 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88053993-c10c-49d4-b69a-82c745001999-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.842151 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.842436 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.850632 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-inventory" (OuterVolumeSpecName: "inventory") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.852892 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.856096 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.856729 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "88053993-c10c-49d4-b69a-82c745001999" (UID: "88053993-c10c-49d4-b69a-82c745001999"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914238 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdpd4\" (UniqueName: \"kubernetes.io/projected/88053993-c10c-49d4-b69a-82c745001999-kube-api-access-zdpd4\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914273 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914283 4689 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914293 4689 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914301 4689 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914312 4689 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914321 4689 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914329 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88053993-c10c-49d4-b69a-82c745001999-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:43 crc kubenswrapper[4689]: I1013 21:53:43.914339 4689 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/88053993-c10c-49d4-b69a-82c745001999-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.292742 4689 generic.go:334] "Generic (PLEG): container finished" podID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerID="d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad" exitCode=0 Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.292813 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sk6x5" event={"ID":"ba225260-80d3-43b8-b47b-15ddf15a578c","Type":"ContainerDied","Data":"d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad"} Oct 13 
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.294979 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.295136 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9" event={"ID":"88053993-c10c-49d4-b69a-82c745001999","Type":"ContainerDied","Data":"801f181ee9e472e17a92887311ef8f453f2b57324d5eb02f4bb6a34db4ee6525"}
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.295156 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="801f181ee9e472e17a92887311ef8f453f2b57324d5eb02f4bb6a34db4ee6525"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.295208 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-jqvm9"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.388125 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"]
Oct 13 21:53:44 crc kubenswrapper[4689]: E1013 21:53:44.388604 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88053993-c10c-49d4-b69a-82c745001999" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.388623 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="88053993-c10c-49d4-b69a-82c745001999" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.388865 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="88053993-c10c-49d4-b69a-82c745001999" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.389502 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.405491 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"]
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.414162 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.414261 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.414344 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d5nnx"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.415141 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.415368 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.527430 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.527704 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.527765 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.527884 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n7vm\" (UniqueName: \"kubernetes.io/projected/a253b97c-0119-461e-bf69-7dfe5bb90e7f-kube-api-access-8n7vm\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.527970 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.528024 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.528054 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.629294 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.629363 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.629400 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n7vm\" (UniqueName: \"kubernetes.io/projected/a253b97c-0119-461e-bf69-7dfe5bb90e7f-kube-api-access-8n7vm\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.629467 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.629497 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.629518 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.629554 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.634270 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.635178 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.635186 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.635553 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.637516 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.639147 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.645100 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n7vm\" (UniqueName: \"kubernetes.io/projected/a253b97c-0119-461e-bf69-7dfe5bb90e7f-kube-api-access-8n7vm\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-v8s47\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"
\"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" Oct 13 21:53:44 crc kubenswrapper[4689]: I1013 21:53:44.732762 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" Oct 13 21:53:45 crc kubenswrapper[4689]: I1013 21:53:45.300589 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47"] Oct 13 21:53:46 crc kubenswrapper[4689]: I1013 21:53:46.315921 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" event={"ID":"a253b97c-0119-461e-bf69-7dfe5bb90e7f","Type":"ContainerStarted","Data":"459cdaa28e0dc78afcdeb014b9dd1f4740a865878b943f2cdbe3f08bec8cb041"} Oct 13 21:53:46 crc kubenswrapper[4689]: I1013 21:53:46.316218 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" event={"ID":"a253b97c-0119-461e-bf69-7dfe5bb90e7f","Type":"ContainerStarted","Data":"ebaa5587a9872641c27e89961d2f3752f92256d34c67e9457f2bbf129a10c86d"} Oct 13 21:53:46 crc kubenswrapper[4689]: I1013 21:53:46.321209 4689 generic.go:334] "Generic (PLEG): container finished" podID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerID="09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73" exitCode=0 Oct 13 21:53:46 crc kubenswrapper[4689]: I1013 21:53:46.321247 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sk6x5" event={"ID":"ba225260-80d3-43b8-b47b-15ddf15a578c","Type":"ContainerDied","Data":"09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73"} Oct 13 21:53:46 crc kubenswrapper[4689]: I1013 21:53:46.335751 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" podStartSLOduration=1.8636105490000001 podStartE2EDuration="2.335733012s" podCreationTimestamp="2025-10-13 21:53:44 +0000 UTC" firstStartedPulling="2025-10-13 21:53:45.323776243 +0000 UTC m=+2542.242021328" lastFinishedPulling="2025-10-13 21:53:45.795898706 +0000 UTC m=+2542.714143791" observedRunningTime="2025-10-13 21:53:46.334433901 +0000 UTC m=+2543.252678986" watchObservedRunningTime="2025-10-13 21:53:46.335733012 +0000 UTC m=+2543.253978107" Oct 13 21:53:47 crc kubenswrapper[4689]: I1013 21:53:47.336821 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sk6x5" event={"ID":"ba225260-80d3-43b8-b47b-15ddf15a578c","Type":"ContainerStarted","Data":"f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634"} Oct 13 21:53:47 crc kubenswrapper[4689]: I1013 21:53:47.362129 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sk6x5" podStartSLOduration=2.834900519 podStartE2EDuration="5.362100356s" podCreationTimestamp="2025-10-13 21:53:42 +0000 UTC" firstStartedPulling="2025-10-13 21:53:44.294770266 +0000 UTC m=+2541.213015351" lastFinishedPulling="2025-10-13 21:53:46.821970103 +0000 UTC m=+2543.740215188" observedRunningTime="2025-10-13 21:53:47.361264027 +0000 UTC m=+2544.279509152" watchObservedRunningTime="2025-10-13 21:53:47.362100356 +0000 UTC m=+2544.280345481" Oct 13 21:53:52 crc kubenswrapper[4689]: I1013 21:53:52.676811 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:52 crc kubenswrapper[4689]: I1013 21:53:52.677254 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:52 crc kubenswrapper[4689]: I1013 21:53:52.745699 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:53 crc kubenswrapper[4689]: I1013 21:53:53.432204 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:53 crc kubenswrapper[4689]: I1013 21:53:53.488894 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sk6x5"] Oct 13 21:53:53 crc kubenswrapper[4689]: I1013 21:53:53.872783 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:53:53 crc kubenswrapper[4689]: E1013 21:53:53.873044 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.400295 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sk6x5" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="registry-server" containerID="cri-o://f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634" gracePeriod=2 Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.854573 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.939981 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-utilities\") pod \"ba225260-80d3-43b8-b47b-15ddf15a578c\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.940418 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff5rj\" (UniqueName: \"kubernetes.io/projected/ba225260-80d3-43b8-b47b-15ddf15a578c-kube-api-access-ff5rj\") pod \"ba225260-80d3-43b8-b47b-15ddf15a578c\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.940769 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-catalog-content\") pod \"ba225260-80d3-43b8-b47b-15ddf15a578c\" (UID: \"ba225260-80d3-43b8-b47b-15ddf15a578c\") " Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.941009 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-utilities" (OuterVolumeSpecName: "utilities") pod "ba225260-80d3-43b8-b47b-15ddf15a578c" (UID: "ba225260-80d3-43b8-b47b-15ddf15a578c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.941477 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.946621 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba225260-80d3-43b8-b47b-15ddf15a578c-kube-api-access-ff5rj" (OuterVolumeSpecName: "kube-api-access-ff5rj") pod "ba225260-80d3-43b8-b47b-15ddf15a578c" (UID: "ba225260-80d3-43b8-b47b-15ddf15a578c"). InnerVolumeSpecName "kube-api-access-ff5rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:53:55 crc kubenswrapper[4689]: I1013 21:53:55.954519 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba225260-80d3-43b8-b47b-15ddf15a578c" (UID: "ba225260-80d3-43b8-b47b-15ddf15a578c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.043505 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba225260-80d3-43b8-b47b-15ddf15a578c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.043760 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff5rj\" (UniqueName: \"kubernetes.io/projected/ba225260-80d3-43b8-b47b-15ddf15a578c-kube-api-access-ff5rj\") on node \"crc\" DevicePath \"\"" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.410552 4689 generic.go:334] "Generic (PLEG): container finished" podID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerID="f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634" exitCode=0 Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.410641 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sk6x5" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.410655 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sk6x5" event={"ID":"ba225260-80d3-43b8-b47b-15ddf15a578c","Type":"ContainerDied","Data":"f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634"} Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.411041 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sk6x5" event={"ID":"ba225260-80d3-43b8-b47b-15ddf15a578c","Type":"ContainerDied","Data":"a115b2734d78f524f95055aeda3c3aa768ab4b3fa880844bc7f2b7537fc22b65"} Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.411065 4689 scope.go:117] "RemoveContainer" containerID="f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.430736 4689 scope.go:117] "RemoveContainer" containerID="09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.456038 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sk6x5"] Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.461385 4689 scope.go:117] "RemoveContainer" containerID="d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.466725 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sk6x5"] Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.498327 4689 scope.go:117] "RemoveContainer" containerID="f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634" Oct 13 21:53:56 crc kubenswrapper[4689]: E1013 21:53:56.498800 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634\": container with ID starting with f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634 not found: ID does not exist" containerID="f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.498879 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634"} err="failed to get container status \"f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634\": rpc error: code = NotFound desc = could not find container \"f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634\": container with ID starting with f11e404611db8f5537e63a5369038f88496e0e24076f84bd6054415fbd67d634 not found: ID does not exist" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.498917 4689 scope.go:117] "RemoveContainer" containerID="09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73" Oct 13 21:53:56 crc kubenswrapper[4689]: E1013 21:53:56.499212 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73\": container with ID starting with 09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73 not found: ID does not exist" containerID="09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.499299 4689 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73"} err="failed to get container status \"09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73\": rpc error: code = NotFound desc = could not find container \"09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73\": container with ID starting with 09bc8f2dd59a034d61912cdac601ca9cae24b451d9c3f9e5a80dcee913cffc73 not found: ID does not exist" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.499396 4689 scope.go:117] "RemoveContainer" containerID="d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad" Oct 13 21:53:56 crc kubenswrapper[4689]: E1013 21:53:56.499735 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad\": container with ID starting with d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad not found: ID does not exist" containerID="d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad" Oct 13 21:53:56 crc kubenswrapper[4689]: I1013 21:53:56.499764 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad"} err="failed to get container status \"d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad\": rpc error: code = NotFound desc = could not find container \"d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad\": container with ID starting with d05585b386432ac673a31afde4f4ce6a654d3e6241fe5458ef6cda334da7d5ad not found: ID does not exist" Oct 13 21:53:57 crc kubenswrapper[4689]: I1013 21:53:57.878772 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" path="/var/lib/kubelet/pods/ba225260-80d3-43b8-b47b-15ddf15a578c/volumes" Oct 13 21:54:08 crc kubenswrapper[4689]: I1013 21:54:08.867729 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:54:08 crc kubenswrapper[4689]: E1013 21:54:08.869144 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:54:21 crc kubenswrapper[4689]: I1013 21:54:21.868184 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:54:21 crc kubenswrapper[4689]: E1013 21:54:21.868995 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:54:32 crc kubenswrapper[4689]: I1013 21:54:32.868630 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:54:32 crc 
kubenswrapper[4689]: E1013 21:54:32.869897 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:54:45 crc kubenswrapper[4689]: I1013 21:54:45.867539 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:54:45 crc kubenswrapper[4689]: E1013 21:54:45.868531 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:55:00 crc kubenswrapper[4689]: I1013 21:55:00.867625 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:55:00 crc kubenswrapper[4689]: E1013 21:55:00.868475 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:55:14 crc kubenswrapper[4689]: I1013 21:55:14.867365 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:55:14 crc kubenswrapper[4689]: E1013 21:55:14.868310 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:55:26 crc kubenswrapper[4689]: I1013 21:55:26.867463 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:55:26 crc kubenswrapper[4689]: E1013 21:55:26.868316 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:55:39 crc kubenswrapper[4689]: I1013 21:55:39.867432 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:55:39 crc kubenswrapper[4689]: E1013 21:55:39.868197 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:55:54 crc kubenswrapper[4689]: I1013 21:55:54.867290 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:55:54 crc kubenswrapper[4689]: E1013 21:55:54.868977 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:56:06 crc kubenswrapper[4689]: I1013 21:56:06.867938 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:56:06 crc kubenswrapper[4689]: E1013 21:56:06.869157 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:56:19 crc kubenswrapper[4689]: I1013 21:56:19.867863 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:56:19 crc kubenswrapper[4689]: E1013 21:56:19.868656 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 21:56:26 crc kubenswrapper[4689]: I1013 21:56:26.800568 4689 generic.go:334] "Generic (PLEG): container finished" podID="a253b97c-0119-461e-bf69-7dfe5bb90e7f" containerID="459cdaa28e0dc78afcdeb014b9dd1f4740a865878b943f2cdbe3f08bec8cb041" exitCode=0 Oct 13 21:56:26 crc kubenswrapper[4689]: I1013 21:56:26.800705 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" event={"ID":"a253b97c-0119-461e-bf69-7dfe5bb90e7f","Type":"ContainerDied","Data":"459cdaa28e0dc78afcdeb014b9dd1f4740a865878b943f2cdbe3f08bec8cb041"} Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.266876 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.371288 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-1\") pod \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.371329 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-inventory\") pod \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.371355 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ssh-key\") pod \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.371406 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n7vm\" (UniqueName: \"kubernetes.io/projected/a253b97c-0119-461e-bf69-7dfe5bb90e7f-kube-api-access-8n7vm\") pod \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.371532 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-2\") pod \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.371669 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-telemetry-combined-ca-bundle\") pod \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.371762 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-0\") pod \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\" (UID: \"a253b97c-0119-461e-bf69-7dfe5bb90e7f\") " Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.378314 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a253b97c-0119-461e-bf69-7dfe5bb90e7f-kube-api-access-8n7vm" (OuterVolumeSpecName: "kube-api-access-8n7vm") pod "a253b97c-0119-461e-bf69-7dfe5bb90e7f" (UID: "a253b97c-0119-461e-bf69-7dfe5bb90e7f"). InnerVolumeSpecName "kube-api-access-8n7vm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.378354 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a253b97c-0119-461e-bf69-7dfe5bb90e7f" (UID: "a253b97c-0119-461e-bf69-7dfe5bb90e7f"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.400111 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "a253b97c-0119-461e-bf69-7dfe5bb90e7f" (UID: "a253b97c-0119-461e-bf69-7dfe5bb90e7f"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.401154 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "a253b97c-0119-461e-bf69-7dfe5bb90e7f" (UID: "a253b97c-0119-461e-bf69-7dfe5bb90e7f"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.405232 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-inventory" (OuterVolumeSpecName: "inventory") pod "a253b97c-0119-461e-bf69-7dfe5bb90e7f" (UID: "a253b97c-0119-461e-bf69-7dfe5bb90e7f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.412866 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a253b97c-0119-461e-bf69-7dfe5bb90e7f" (UID: "a253b97c-0119-461e-bf69-7dfe5bb90e7f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.413440 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "a253b97c-0119-461e-bf69-7dfe5bb90e7f" (UID: "a253b97c-0119-461e-bf69-7dfe5bb90e7f"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.473775 4689 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.473812 4689 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.473823 4689 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.473837 4689 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-inventory\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.473846 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.473856 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n7vm\" (UniqueName: \"kubernetes.io/projected/a253b97c-0119-461e-bf69-7dfe5bb90e7f-kube-api-access-8n7vm\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.473869 4689 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a253b97c-0119-461e-bf69-7dfe5bb90e7f-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.824110 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" event={"ID":"a253b97c-0119-461e-bf69-7dfe5bb90e7f","Type":"ContainerDied","Data":"ebaa5587a9872641c27e89961d2f3752f92256d34c67e9457f2bbf129a10c86d"} Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.824156 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebaa5587a9872641c27e89961d2f3752f92256d34c67e9457f2bbf129a10c86d" Oct 13 21:56:28 crc kubenswrapper[4689]: I1013 21:56:28.824190 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-v8s47" Oct 13 21:56:31 crc kubenswrapper[4689]: I1013 21:56:31.868503 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027" Oct 13 21:56:32 crc kubenswrapper[4689]: I1013 21:56:32.861617 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"5a8d6bd605d3b7ccb12aec891829ef3ded1c4a5a8f46d8f1bca1fa1375f530b3"} Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.039766 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-94dsz"] Oct 13 21:56:46 crc kubenswrapper[4689]: E1013 21:56:46.040950 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a253b97c-0119-461e-bf69-7dfe5bb90e7f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.040968 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a253b97c-0119-461e-bf69-7dfe5bb90e7f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 13 21:56:46 crc kubenswrapper[4689]: E1013 21:56:46.040994 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="registry-server" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.041028 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="registry-server" Oct 13 21:56:46 crc kubenswrapper[4689]: E1013 21:56:46.041046 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="extract-content" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.041052 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="extract-content" Oct 13 21:56:46 crc kubenswrapper[4689]: E1013 21:56:46.041074 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="extract-utilities" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.041107 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="extract-utilities" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.041424 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="a253b97c-0119-461e-bf69-7dfe5bb90e7f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.041449 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba225260-80d3-43b8-b47b-15ddf15a578c" containerName="registry-server" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.042912 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.060325 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-94dsz"] Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.159727 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw6sc\" (UniqueName: \"kubernetes.io/projected/54c58829-65a5-4605-9b57-5c262209e513-kube-api-access-kw6sc\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.159887 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-catalog-content\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.159915 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-utilities\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.262513 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-catalog-content\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.263187 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-catalog-content\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.263271 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-utilities\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.263448 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw6sc\" (UniqueName: \"kubernetes.io/projected/54c58829-65a5-4605-9b57-5c262209e513-kube-api-access-kw6sc\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.264278 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-utilities\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.299313 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kw6sc\" (UniqueName: \"kubernetes.io/projected/54c58829-65a5-4605-9b57-5c262209e513-kube-api-access-kw6sc\") pod \"certified-operators-94dsz\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.377656 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:46 crc kubenswrapper[4689]: I1013 21:56:46.875102 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-94dsz"] Oct 13 21:56:47 crc kubenswrapper[4689]: I1013 21:56:47.000496 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-94dsz" event={"ID":"54c58829-65a5-4605-9b57-5c262209e513","Type":"ContainerStarted","Data":"6ff36a644e685125cfb4cdfd696b7be948224927c8ef8325edd664ba968a9f37"} Oct 13 21:56:48 crc kubenswrapper[4689]: I1013 21:56:48.011309 4689 generic.go:334] "Generic (PLEG): container finished" podID="54c58829-65a5-4605-9b57-5c262209e513" containerID="ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999" exitCode=0 Oct 13 21:56:48 crc kubenswrapper[4689]: I1013 21:56:48.011527 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-94dsz" event={"ID":"54c58829-65a5-4605-9b57-5c262209e513","Type":"ContainerDied","Data":"ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999"} Oct 13 21:56:49 crc kubenswrapper[4689]: I1013 21:56:49.033849 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-94dsz" event={"ID":"54c58829-65a5-4605-9b57-5c262209e513","Type":"ContainerStarted","Data":"b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd"} Oct 13 21:56:50 crc kubenswrapper[4689]: I1013 21:56:50.047070 4689 generic.go:334] "Generic (PLEG): container finished" podID="54c58829-65a5-4605-9b57-5c262209e513" containerID="b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd" exitCode=0 Oct 13 21:56:50 crc kubenswrapper[4689]: I1013 21:56:50.047119 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-94dsz" event={"ID":"54c58829-65a5-4605-9b57-5c262209e513","Type":"ContainerDied","Data":"b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd"} Oct 13 21:56:51 crc kubenswrapper[4689]: I1013 21:56:51.058542 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-94dsz" event={"ID":"54c58829-65a5-4605-9b57-5c262209e513","Type":"ContainerStarted","Data":"287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68"} Oct 13 21:56:51 crc kubenswrapper[4689]: I1013 21:56:51.079795 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-94dsz" podStartSLOduration=2.640384334 podStartE2EDuration="5.079774529s" podCreationTimestamp="2025-10-13 21:56:46 +0000 UTC" firstStartedPulling="2025-10-13 21:56:48.015482778 +0000 UTC m=+2724.933727863" lastFinishedPulling="2025-10-13 21:56:50.454872973 +0000 UTC m=+2727.373118058" observedRunningTime="2025-10-13 21:56:51.076082871 +0000 UTC m=+2727.994327966" watchObservedRunningTime="2025-10-13 21:56:51.079774529 +0000 UTC m=+2727.998019614" Oct 13 21:56:56 crc kubenswrapper[4689]: I1013 21:56:56.377932 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:56 crc kubenswrapper[4689]: I1013 21:56:56.378365 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:56 crc kubenswrapper[4689]: I1013 21:56:56.423527 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:57 crc kubenswrapper[4689]: I1013 21:56:57.207005 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:57 crc kubenswrapper[4689]: I1013 21:56:57.285172 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-94dsz"] Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.141515 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-94dsz" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="registry-server" containerID="cri-o://287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68" gracePeriod=2 Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.623188 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.761516 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw6sc\" (UniqueName: \"kubernetes.io/projected/54c58829-65a5-4605-9b57-5c262209e513-kube-api-access-kw6sc\") pod \"54c58829-65a5-4605-9b57-5c262209e513\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.761605 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-catalog-content\") pod \"54c58829-65a5-4605-9b57-5c262209e513\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.761722 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-utilities\") pod \"54c58829-65a5-4605-9b57-5c262209e513\" (UID: \"54c58829-65a5-4605-9b57-5c262209e513\") " Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.763010 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-utilities" (OuterVolumeSpecName: "utilities") pod "54c58829-65a5-4605-9b57-5c262209e513" (UID: "54c58829-65a5-4605-9b57-5c262209e513"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.769851 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54c58829-65a5-4605-9b57-5c262209e513-kube-api-access-kw6sc" (OuterVolumeSpecName: "kube-api-access-kw6sc") pod "54c58829-65a5-4605-9b57-5c262209e513" (UID: "54c58829-65a5-4605-9b57-5c262209e513"). InnerVolumeSpecName "kube-api-access-kw6sc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.863513 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.863541 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kw6sc\" (UniqueName: \"kubernetes.io/projected/54c58829-65a5-4605-9b57-5c262209e513-kube-api-access-kw6sc\") on node \"crc\" DevicePath \"\"" Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.877224 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "54c58829-65a5-4605-9b57-5c262209e513" (UID: "54c58829-65a5-4605-9b57-5c262209e513"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:56:59 crc kubenswrapper[4689]: I1013 21:56:59.965291 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c58829-65a5-4605-9b57-5c262209e513-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.155303 4689 generic.go:334] "Generic (PLEG): container finished" podID="54c58829-65a5-4605-9b57-5c262209e513" containerID="287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68" exitCode=0 Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.155378 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-94dsz" event={"ID":"54c58829-65a5-4605-9b57-5c262209e513","Type":"ContainerDied","Data":"287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68"} Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.155405 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-94dsz" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.155425 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-94dsz" event={"ID":"54c58829-65a5-4605-9b57-5c262209e513","Type":"ContainerDied","Data":"6ff36a644e685125cfb4cdfd696b7be948224927c8ef8325edd664ba968a9f37"} Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.155458 4689 scope.go:117] "RemoveContainer" containerID="287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.193166 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-94dsz"] Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.210890 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-94dsz"] Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.216769 4689 scope.go:117] "RemoveContainer" containerID="b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.242536 4689 scope.go:117] "RemoveContainer" containerID="ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.296662 4689 scope.go:117] "RemoveContainer" containerID="287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68" Oct 13 21:57:00 crc kubenswrapper[4689]: E1013 21:57:00.297250 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68\": container with ID starting with 287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68 not found: ID does not exist" containerID="287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.297311 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68"} err="failed to get container status \"287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68\": rpc error: code = NotFound desc = could not find container \"287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68\": container with ID starting with 287f128a72e42747bf44201606e32dfa8703e779fb47fd2fdc03b5dee2589c68 not found: ID does not exist" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.297349 4689 scope.go:117] "RemoveContainer" containerID="b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd" Oct 13 21:57:00 crc kubenswrapper[4689]: E1013 21:57:00.298108 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd\": container with ID starting with b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd not found: ID does not exist" containerID="b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.298152 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd"} err="failed to get container status \"b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd\": rpc error: code = NotFound desc = could not find 
container \"b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd\": container with ID starting with b903d0e9fc61c182a3f924a60e8875360d31021257ac89844c926ccc4d9745cd not found: ID does not exist" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.298185 4689 scope.go:117] "RemoveContainer" containerID="ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999" Oct 13 21:57:00 crc kubenswrapper[4689]: E1013 21:57:00.298576 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999\": container with ID starting with ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999 not found: ID does not exist" containerID="ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999" Oct 13 21:57:00 crc kubenswrapper[4689]: I1013 21:57:00.298616 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999"} err="failed to get container status \"ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999\": rpc error: code = NotFound desc = could not find container \"ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999\": container with ID starting with ae6d12364ac0b65fa4061a5ce302ffeca6e2d1a32fcca95edb052c0a27fbc999 not found: ID does not exist" Oct 13 21:57:01 crc kubenswrapper[4689]: I1013 21:57:01.882276 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c58829-65a5-4605-9b57-5c262209e513" path="/var/lib/kubelet/pods/54c58829-65a5-4605-9b57-5c262209e513/volumes" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.259884 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9xjz6"] Oct 13 21:57:16 crc kubenswrapper[4689]: E1013 21:57:16.261060 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="extract-content" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.261085 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="extract-content" Oct 13 21:57:16 crc kubenswrapper[4689]: E1013 21:57:16.261130 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="extract-utilities" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.261142 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="extract-utilities" Oct 13 21:57:16 crc kubenswrapper[4689]: E1013 21:57:16.261160 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="registry-server" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.261170 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="registry-server" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.261983 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c58829-65a5-4605-9b57-5c262209e513" containerName="registry-server" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.264835 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.274098 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xjz6"] Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.305178 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-catalog-content\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.305235 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c2kw\" (UniqueName: \"kubernetes.io/projected/a4d4b186-bfc5-4dd7-b04d-e454769343c3-kube-api-access-9c2kw\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.305338 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-utilities\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.410153 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-catalog-content\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.410466 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c2kw\" (UniqueName: \"kubernetes.io/projected/a4d4b186-bfc5-4dd7-b04d-e454769343c3-kube-api-access-9c2kw\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.410655 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-utilities\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.410870 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-catalog-content\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.411087 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-utilities\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.429233 4689 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9c2kw\" (UniqueName: \"kubernetes.io/projected/a4d4b186-bfc5-4dd7-b04d-e454769343c3-kube-api-access-9c2kw\") pod \"community-operators-9xjz6\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:16 crc kubenswrapper[4689]: I1013 21:57:16.610775 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:17 crc kubenswrapper[4689]: I1013 21:57:17.199758 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xjz6"] Oct 13 21:57:17 crc kubenswrapper[4689]: I1013 21:57:17.358267 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xjz6" event={"ID":"a4d4b186-bfc5-4dd7-b04d-e454769343c3","Type":"ContainerStarted","Data":"f10ab6c087a56c47b91f681becc756e4f8f1f70be4df390c36ece19863bbe4fc"} Oct 13 21:57:18 crc kubenswrapper[4689]: I1013 21:57:18.370240 4689 generic.go:334] "Generic (PLEG): container finished" podID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerID="5c03385e0a36987cddf6a095762ec23a30a8f20d740474b7b964287b0fb0a1e7" exitCode=0 Oct 13 21:57:18 crc kubenswrapper[4689]: I1013 21:57:18.370339 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xjz6" event={"ID":"a4d4b186-bfc5-4dd7-b04d-e454769343c3","Type":"ContainerDied","Data":"5c03385e0a36987cddf6a095762ec23a30a8f20d740474b7b964287b0fb0a1e7"} Oct 13 21:57:19 crc kubenswrapper[4689]: I1013 21:57:19.383889 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xjz6" event={"ID":"a4d4b186-bfc5-4dd7-b04d-e454769343c3","Type":"ContainerStarted","Data":"2aaa00824e855f7cb4adfbb7ce6d388a84d7bb21d5becd233a726f901167669b"} Oct 13 21:57:20 crc kubenswrapper[4689]: I1013 21:57:20.398842 4689 generic.go:334] "Generic (PLEG): container finished" podID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerID="2aaa00824e855f7cb4adfbb7ce6d388a84d7bb21d5becd233a726f901167669b" exitCode=0 Oct 13 21:57:20 crc kubenswrapper[4689]: I1013 21:57:20.398885 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xjz6" event={"ID":"a4d4b186-bfc5-4dd7-b04d-e454769343c3","Type":"ContainerDied","Data":"2aaa00824e855f7cb4adfbb7ce6d388a84d7bb21d5becd233a726f901167669b"} Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.411224 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xjz6" event={"ID":"a4d4b186-bfc5-4dd7-b04d-e454769343c3","Type":"ContainerStarted","Data":"0bc77e0a76a7643f15ac227229d0bfe52399c647ebedf8f904603db565e6a192"} Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.438160 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9xjz6" podStartSLOduration=2.937928244 podStartE2EDuration="5.438140293s" podCreationTimestamp="2025-10-13 21:57:16 +0000 UTC" firstStartedPulling="2025-10-13 21:57:18.372279814 +0000 UTC m=+2755.290524899" lastFinishedPulling="2025-10-13 21:57:20.872491863 +0000 UTC m=+2757.790736948" observedRunningTime="2025-10-13 21:57:21.435544111 +0000 UTC m=+2758.353789196" watchObservedRunningTime="2025-10-13 21:57:21.438140293 +0000 UTC m=+2758.356385368" Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.713031 4689 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/tempest-tests-tempest"]
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.714415 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.717140 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.718297 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mb9bl"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.721208 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.724646 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.728831 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813371 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813480 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813546 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813693 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813748 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813778 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-config-data\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813804 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6b8t\" (UniqueName: \"kubernetes.io/projected/2fd6769f-1acf-441d-8569-13baec5fcf72-kube-api-access-h6b8t\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813860 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.813897 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.915943 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.916072 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.916151 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.917558 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.916577 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-config-data\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.917805 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6b8t\" (UniqueName: \"kubernetes.io/projected/2fd6769f-1acf-441d-8569-13baec5fcf72-kube-api-access-h6b8t\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.917870 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.917932 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.917976 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-config-data\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.918079 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.918196 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.918224 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.918357 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.918465 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.924091 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.924093 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.924726 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.940881 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6b8t\" (UniqueName: \"kubernetes.io/projected/2fd6769f-1acf-441d-8569-13baec5fcf72-kube-api-access-h6b8t\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:21 crc kubenswrapper[4689]: I1013 21:57:21.945728 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") " pod="openstack/tempest-tests-tempest"
Oct 13 21:57:22 crc kubenswrapper[4689]: I1013 21:57:22.071287 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 13 21:57:22 crc kubenswrapper[4689]: I1013 21:57:22.535569 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 13 21:57:22 crc kubenswrapper[4689]: W1013 21:57:22.542637 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fd6769f_1acf_441d_8569_13baec5fcf72.slice/crio-4de41aab0177c9b91cd8456af6d35b84d5df458a9932261968834509ca72402c WatchSource:0}: Error finding container 4de41aab0177c9b91cd8456af6d35b84d5df458a9932261968834509ca72402c: Status 404 returned error can't find the container with id 4de41aab0177c9b91cd8456af6d35b84d5df458a9932261968834509ca72402c
Oct 13 21:57:23 crc kubenswrapper[4689]: I1013 21:57:23.449058 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2fd6769f-1acf-441d-8569-13baec5fcf72","Type":"ContainerStarted","Data":"4de41aab0177c9b91cd8456af6d35b84d5df458a9932261968834509ca72402c"}
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.275651 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-szfmk"]
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.278392 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.320129 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-szfmk"]
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.370233 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-utilities\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.370302 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-977h7\" (UniqueName: \"kubernetes.io/projected/3a72351b-cf75-43c0-ac66-acd865695d76-kube-api-access-977h7\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.370372 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-catalog-content\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.471773 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-utilities\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.471843 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-977h7\" (UniqueName: \"kubernetes.io/projected/3a72351b-cf75-43c0-ac66-acd865695d76-kube-api-access-977h7\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.471911 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-catalog-content\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.472383 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-catalog-content\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.472681 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-utilities\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.495661 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-977h7\" (UniqueName: \"kubernetes.io/projected/3a72351b-cf75-43c0-ac66-acd865695d76-kube-api-access-977h7\") pod \"redhat-operators-szfmk\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:24 crc kubenswrapper[4689]: I1013 21:57:24.640951 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-szfmk"
Oct 13 21:57:25 crc kubenswrapper[4689]: I1013 21:57:25.111824 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-szfmk"]
Oct 13 21:57:25 crc kubenswrapper[4689]: W1013 21:57:25.128680 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a72351b_cf75_43c0_ac66_acd865695d76.slice/crio-94e969866a3e99c287713d4cc32fddd8769c0a0581af0119b122a983f4e9d21a WatchSource:0}: Error finding container 94e969866a3e99c287713d4cc32fddd8769c0a0581af0119b122a983f4e9d21a: Status 404 returned error can't find the container with id 94e969866a3e99c287713d4cc32fddd8769c0a0581af0119b122a983f4e9d21a
Oct 13 21:57:25 crc kubenswrapper[4689]: I1013 21:57:25.475346 4689 generic.go:334] "Generic (PLEG): container finished" podID="3a72351b-cf75-43c0-ac66-acd865695d76" containerID="c9f96a630c5656bb3459fac1ff25661d828bcd829a8100f55bace997dd9008f3" exitCode=0
Oct 13 21:57:25 crc kubenswrapper[4689]: I1013 21:57:25.475400 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-szfmk" event={"ID":"3a72351b-cf75-43c0-ac66-acd865695d76","Type":"ContainerDied","Data":"c9f96a630c5656bb3459fac1ff25661d828bcd829a8100f55bace997dd9008f3"}
Oct 13 21:57:25 crc kubenswrapper[4689]: I1013 21:57:25.475427 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-szfmk" event={"ID":"3a72351b-cf75-43c0-ac66-acd865695d76","Type":"ContainerStarted","Data":"94e969866a3e99c287713d4cc32fddd8769c0a0581af0119b122a983f4e9d21a"}
Oct 13 21:57:26 crc kubenswrapper[4689]: I1013 21:57:26.612372 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9xjz6"
Oct 13 21:57:26 crc kubenswrapper[4689]: I1013 21:57:26.612727 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9xjz6"
Oct 13 21:57:26 crc kubenswrapper[4689]: I1013 21:57:26.661948 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9xjz6"
Oct 13 21:57:27 crc kubenswrapper[4689]: I1013 21:57:27.544332 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9xjz6"
Oct 13 21:57:29 crc kubenswrapper[4689]: I1013 21:57:29.048756 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9xjz6"]
Oct 13 21:57:29 crc kubenswrapper[4689]: I1013 21:57:29.518214 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-szfmk" event={"ID":"3a72351b-cf75-43c0-ac66-acd865695d76","Type":"ContainerStarted","Data":"197e671af8ff321ee9fc64a0e751c976e82ce175697b15be0327997b18ba1737"}
containerName="registry-server" containerID="cri-o://0bc77e0a76a7643f15ac227229d0bfe52399c647ebedf8f904603db565e6a192" gracePeriod=2 Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.544030 4689 generic.go:334] "Generic (PLEG): container finished" podID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerID="0bc77e0a76a7643f15ac227229d0bfe52399c647ebedf8f904603db565e6a192" exitCode=0 Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.544109 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xjz6" event={"ID":"a4d4b186-bfc5-4dd7-b04d-e454769343c3","Type":"ContainerDied","Data":"0bc77e0a76a7643f15ac227229d0bfe52399c647ebedf8f904603db565e6a192"} Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.546932 4689 generic.go:334] "Generic (PLEG): container finished" podID="3a72351b-cf75-43c0-ac66-acd865695d76" containerID="197e671af8ff321ee9fc64a0e751c976e82ce175697b15be0327997b18ba1737" exitCode=0 Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.546979 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-szfmk" event={"ID":"3a72351b-cf75-43c0-ac66-acd865695d76","Type":"ContainerDied","Data":"197e671af8ff321ee9fc64a0e751c976e82ce175697b15be0327997b18ba1737"} Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.796490 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.820926 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-catalog-content\") pod \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.821016 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c2kw\" (UniqueName: \"kubernetes.io/projected/a4d4b186-bfc5-4dd7-b04d-e454769343c3-kube-api-access-9c2kw\") pod \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.821044 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-utilities\") pod \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\" (UID: \"a4d4b186-bfc5-4dd7-b04d-e454769343c3\") " Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.821765 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-utilities" (OuterVolumeSpecName: "utilities") pod "a4d4b186-bfc5-4dd7-b04d-e454769343c3" (UID: "a4d4b186-bfc5-4dd7-b04d-e454769343c3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.822285 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.840689 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4d4b186-bfc5-4dd7-b04d-e454769343c3-kube-api-access-9c2kw" (OuterVolumeSpecName: "kube-api-access-9c2kw") pod "a4d4b186-bfc5-4dd7-b04d-e454769343c3" (UID: "a4d4b186-bfc5-4dd7-b04d-e454769343c3"). InnerVolumeSpecName "kube-api-access-9c2kw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:57:31 crc kubenswrapper[4689]: I1013 21:57:31.924043 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c2kw\" (UniqueName: \"kubernetes.io/projected/a4d4b186-bfc5-4dd7-b04d-e454769343c3-kube-api-access-9c2kw\") on node \"crc\" DevicePath \"\"" Oct 13 21:57:32 crc kubenswrapper[4689]: I1013 21:57:32.563226 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xjz6" event={"ID":"a4d4b186-bfc5-4dd7-b04d-e454769343c3","Type":"ContainerDied","Data":"f10ab6c087a56c47b91f681becc756e4f8f1f70be4df390c36ece19863bbe4fc"} Oct 13 21:57:32 crc kubenswrapper[4689]: I1013 21:57:32.563294 4689 scope.go:117] "RemoveContainer" containerID="0bc77e0a76a7643f15ac227229d0bfe52399c647ebedf8f904603db565e6a192" Oct 13 21:57:32 crc kubenswrapper[4689]: I1013 21:57:32.564149 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xjz6" Oct 13 21:57:32 crc kubenswrapper[4689]: I1013 21:57:32.591555 4689 scope.go:117] "RemoveContainer" containerID="2aaa00824e855f7cb4adfbb7ce6d388a84d7bb21d5becd233a726f901167669b" Oct 13 21:57:32 crc kubenswrapper[4689]: I1013 21:57:32.753412 4689 scope.go:117] "RemoveContainer" containerID="5c03385e0a36987cddf6a095762ec23a30a8f20d740474b7b964287b0fb0a1e7" Oct 13 21:57:32 crc kubenswrapper[4689]: I1013 21:57:32.905975 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4d4b186-bfc5-4dd7-b04d-e454769343c3" (UID: "a4d4b186-bfc5-4dd7-b04d-e454769343c3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:57:32 crc kubenswrapper[4689]: I1013 21:57:32.945175 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d4b186-bfc5-4dd7-b04d-e454769343c3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:57:33 crc kubenswrapper[4689]: I1013 21:57:33.245129 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9xjz6"] Oct 13 21:57:33 crc kubenswrapper[4689]: I1013 21:57:33.256296 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9xjz6"] Oct 13 21:57:33 crc kubenswrapper[4689]: I1013 21:57:33.578453 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-szfmk" event={"ID":"3a72351b-cf75-43c0-ac66-acd865695d76","Type":"ContainerStarted","Data":"73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb"} Oct 13 21:57:33 crc kubenswrapper[4689]: I1013 21:57:33.601803 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-szfmk" podStartSLOduration=1.989161959 podStartE2EDuration="9.601777873s" podCreationTimestamp="2025-10-13 21:57:24 +0000 UTC" firstStartedPulling="2025-10-13 21:57:25.476946546 +0000 UTC m=+2762.395191631" lastFinishedPulling="2025-10-13 21:57:33.08956246 +0000 UTC m=+2770.007807545" observedRunningTime="2025-10-13 21:57:33.601170469 +0000 UTC m=+2770.519415564" watchObservedRunningTime="2025-10-13 21:57:33.601777873 +0000 UTC m=+2770.520022968" Oct 13 21:57:33 crc kubenswrapper[4689]: I1013 21:57:33.889661 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" path="/var/lib/kubelet/pods/a4d4b186-bfc5-4dd7-b04d-e454769343c3/volumes" Oct 13 21:57:34 crc kubenswrapper[4689]: I1013 21:57:34.641907 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-szfmk" Oct 13 21:57:34 crc kubenswrapper[4689]: I1013 21:57:34.641951 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-szfmk" Oct 13 21:57:35 crc kubenswrapper[4689]: I1013 21:57:35.688578 4689 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-szfmk" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="registry-server" probeResult="failure" output=< Oct 13 21:57:35 crc kubenswrapper[4689]: timeout: failed to connect service ":50051" within 1s Oct 13 21:57:35 crc kubenswrapper[4689]: > Oct 13 21:57:44 crc kubenswrapper[4689]: I1013 21:57:44.689879 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-szfmk" Oct 13 21:57:44 crc kubenswrapper[4689]: I1013 21:57:44.743000 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-szfmk" Oct 13 21:57:44 crc kubenswrapper[4689]: I1013 21:57:44.927097 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-szfmk"] Oct 13 21:57:46 crc kubenswrapper[4689]: I1013 21:57:46.711117 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-szfmk" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="registry-server" 
containerID="cri-o://73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb" gracePeriod=2 Oct 13 21:57:47 crc kubenswrapper[4689]: I1013 21:57:47.728219 4689 generic.go:334] "Generic (PLEG): container finished" podID="3a72351b-cf75-43c0-ac66-acd865695d76" containerID="73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb" exitCode=0 Oct 13 21:57:47 crc kubenswrapper[4689]: I1013 21:57:47.728298 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-szfmk" event={"ID":"3a72351b-cf75-43c0-ac66-acd865695d76","Type":"ContainerDied","Data":"73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb"} Oct 13 21:57:54 crc kubenswrapper[4689]: E1013 21:57:54.642335 4689 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb is running failed: container process not found" containerID="73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb" cmd=["grpc_health_probe","-addr=:50051"] Oct 13 21:57:54 crc kubenswrapper[4689]: E1013 21:57:54.643304 4689 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb is running failed: container process not found" containerID="73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb" cmd=["grpc_health_probe","-addr=:50051"] Oct 13 21:57:54 crc kubenswrapper[4689]: E1013 21:57:54.643713 4689 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb is running failed: container process not found" containerID="73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb" cmd=["grpc_health_probe","-addr=:50051"] Oct 13 21:57:54 crc kubenswrapper[4689]: E1013 21:57:54.643746 4689 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-szfmk" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="registry-server" Oct 13 21:58:02 crc kubenswrapper[4689]: E1013 21:58:02.422630 4689 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 13 21:58:02 crc kubenswrapper[4689]: E1013 21:58:02.423694 4689 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h6b8t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(2fd6769f-1acf-441d-8569-13baec5fcf72): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 13 21:58:02 crc kubenswrapper[4689]: E1013 21:58:02.424938 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="2fd6769f-1acf-441d-8569-13baec5fcf72" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.710909 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-szfmk" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.848413 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-catalog-content\") pod \"3a72351b-cf75-43c0-ac66-acd865695d76\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.848502 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-utilities\") pod \"3a72351b-cf75-43c0-ac66-acd865695d76\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.848577 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-977h7\" (UniqueName: \"kubernetes.io/projected/3a72351b-cf75-43c0-ac66-acd865695d76-kube-api-access-977h7\") pod \"3a72351b-cf75-43c0-ac66-acd865695d76\" (UID: \"3a72351b-cf75-43c0-ac66-acd865695d76\") " Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.849318 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-utilities" (OuterVolumeSpecName: "utilities") pod "3a72351b-cf75-43c0-ac66-acd865695d76" (UID: "3a72351b-cf75-43c0-ac66-acd865695d76"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.854537 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a72351b-cf75-43c0-ac66-acd865695d76-kube-api-access-977h7" (OuterVolumeSpecName: "kube-api-access-977h7") pod "3a72351b-cf75-43c0-ac66-acd865695d76" (UID: "3a72351b-cf75-43c0-ac66-acd865695d76"). InnerVolumeSpecName "kube-api-access-977h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.880178 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-szfmk" event={"ID":"3a72351b-cf75-43c0-ac66-acd865695d76","Type":"ContainerDied","Data":"94e969866a3e99c287713d4cc32fddd8769c0a0581af0119b122a983f4e9d21a"} Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.880239 4689 scope.go:117] "RemoveContainer" containerID="73915863d03504e1530ccc46387d25292bb17f334e9c4acd37b8e7fd973db6cb" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.880248 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-szfmk" Oct 13 21:58:02 crc kubenswrapper[4689]: E1013 21:58:02.882508 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="2fd6769f-1acf-441d-8569-13baec5fcf72" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.899295 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a72351b-cf75-43c0-ac66-acd865695d76" (UID: "3a72351b-cf75-43c0-ac66-acd865695d76"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.914673 4689 scope.go:117] "RemoveContainer" containerID="197e671af8ff321ee9fc64a0e751c976e82ce175697b15be0327997b18ba1737" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.934650 4689 scope.go:117] "RemoveContainer" containerID="c9f96a630c5656bb3459fac1ff25661d828bcd829a8100f55bace997dd9008f3" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.950943 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.950969 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a72351b-cf75-43c0-ac66-acd865695d76-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 21:58:02 crc kubenswrapper[4689]: I1013 21:58:02.950979 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-977h7\" (UniqueName: \"kubernetes.io/projected/3a72351b-cf75-43c0-ac66-acd865695d76-kube-api-access-977h7\") on node \"crc\" DevicePath \"\"" Oct 13 21:58:03 crc kubenswrapper[4689]: I1013 21:58:03.231286 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-szfmk"] Oct 13 21:58:03 crc kubenswrapper[4689]: I1013 21:58:03.238705 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-szfmk"] Oct 13 21:58:03 crc kubenswrapper[4689]: I1013 21:58:03.883134 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" path="/var/lib/kubelet/pods/3a72351b-cf75-43c0-ac66-acd865695d76/volumes" Oct 13 21:58:15 crc kubenswrapper[4689]: I1013 21:58:15.346847 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 13 21:58:17 crc kubenswrapper[4689]: I1013 21:58:17.017124 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2fd6769f-1acf-441d-8569-13baec5fcf72","Type":"ContainerStarted","Data":"2ea9061742c4120956c5a652f7815f61a5311ab210ea7056b78a84262cf53ac7"} Oct 13 21:58:17 crc kubenswrapper[4689]: I1013 21:58:17.042400 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.243788122 podStartE2EDuration="57.042376871s" podCreationTimestamp="2025-10-13 21:57:20 +0000 UTC" firstStartedPulling="2025-10-13 21:57:22.545727616 +0000 UTC m=+2759.463972701" 
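[editor's note] Between the ErrImagePull at 21:58:02 and the ContainerStarted at 21:58:17, the tempest pod sat in ImagePullBackOff: rather than retrying in a tight loop, the kubelet re-attempts the pull on an exponential schedule. The constants below (10s initial delay, doubling, capped at 5m) are the commonly documented kubelet defaults, assumed here since the log does not show them; a sketch of that policy:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // assumed defaults: 10s initial backoff, doubling, capped at 5m
        backoff, max := 10*time.Second, 5*time.Minute
        for attempt := 1; attempt <= 6; attempt++ {
            fmt.Printf("attempt %d: wait %s before re-pulling\n", attempt, backoff)
            backoff *= 2
            if backoff > max {
                backoff = max
            }
        }
    }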
Oct 13 21:58:17 crc kubenswrapper[4689]: I1013 21:58:17.042400 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.243788122 podStartE2EDuration="57.042376871s" podCreationTimestamp="2025-10-13 21:57:20 +0000 UTC" firstStartedPulling="2025-10-13 21:57:22.545727616 +0000 UTC m=+2759.463972701" lastFinishedPulling="2025-10-13 21:58:15.344316355 +0000 UTC m=+2812.262561450" observedRunningTime="2025-10-13 21:58:17.033880338 +0000 UTC m=+2813.952125463" watchObservedRunningTime="2025-10-13 21:58:17.042376871 +0000 UTC m=+2813.960621976"
Oct 13 21:58:53 crc kubenswrapper[4689]: I1013 21:58:53.859013 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 21:58:53 crc kubenswrapper[4689]: I1013 21:58:53.859879 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 21:59:23 crc kubenswrapper[4689]: I1013 21:59:23.858870 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 21:59:23 crc kubenswrapper[4689]: I1013 21:59:23.859517 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 21:59:53 crc kubenswrapper[4689]: I1013 21:59:53.859016 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 13 21:59:53 crc kubenswrapper[4689]: I1013 21:59:53.859467 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 21:59:53 crc kubenswrapper[4689]: I1013 21:59:53.859509 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 21:59:53 crc kubenswrapper[4689]: I1013 21:59:53.860261 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5a8d6bd605d3b7ccb12aec891829ef3ded1c4a5a8f46d8f1bca1fa1375f530b3"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 21:59:53 crc kubenswrapper[4689]: I1013 21:59:53.860308 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://5a8d6bd605d3b7ccb12aec891829ef3ded1c4a5a8f46d8f1bca1fa1375f530b3" gracePeriod=600
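[editor's note] The repeating failures above are an HTTP liveness probe: the kubelet issues GET http://127.0.0.1:8798/health and counts any status in [200,400) as success, so a connection refused is a failure; once the failure threshold is crossed the container is killed with the pod's termination grace period (600s here) and restarted. A minimal sketch of the kind of endpoint being probed; the path and port come from the log, the handler itself is an assumption:

    package main

    import (
        "log"
        "net/http"
    )

    func main() {
        http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
            // the kubelet treats any 2xx/3xx response as a passing probe
            w.WriteHeader(http.StatusOK)
        })
        log.Fatal(http.ListenAndServe("127.0.0.1:8798", nil))
    }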
Oct 13 21:59:54 crc kubenswrapper[4689]: I1013 21:59:54.972481 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="5a8d6bd605d3b7ccb12aec891829ef3ded1c4a5a8f46d8f1bca1fa1375f530b3" exitCode=0
Oct 13 21:59:54 crc kubenswrapper[4689]: I1013 21:59:54.972558 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"5a8d6bd605d3b7ccb12aec891829ef3ded1c4a5a8f46d8f1bca1fa1375f530b3"}
Oct 13 21:59:54 crc kubenswrapper[4689]: I1013 21:59:54.973239 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"}
Oct 13 21:59:54 crc kubenswrapper[4689]: I1013 21:59:54.973279 4689 scope.go:117] "RemoveContainer" containerID="d99d155686eb30cf265be11d12999cbe88f4b905a6aaf4f40391fdd46e19f027"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.183015 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"]
Oct 13 22:00:00 crc kubenswrapper[4689]: E1013 22:00:00.184120 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="extract-content"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184138 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="extract-content"
Oct 13 22:00:00 crc kubenswrapper[4689]: E1013 22:00:00.184166 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerName="registry-server"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184176 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerName="registry-server"
Oct 13 22:00:00 crc kubenswrapper[4689]: E1013 22:00:00.184190 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerName="extract-utilities"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184198 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerName="extract-utilities"
Oct 13 22:00:00 crc kubenswrapper[4689]: E1013 22:00:00.184213 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerName="extract-content"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184220 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerName="extract-content"
Oct 13 22:00:00 crc kubenswrapper[4689]: E1013 22:00:00.184251 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="registry-server"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184258 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="registry-server"
Oct 13 22:00:00 crc kubenswrapper[4689]: E1013 22:00:00.184269 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="extract-utilities"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184278 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="extract-utilities"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184499 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4d4b186-bfc5-4dd7-b04d-e454769343c3" containerName="registry-server"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.184527 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a72351b-cf75-43c0-ac66-acd865695d76" containerName="registry-server"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.185394 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.187547 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.193833 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"]
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.218243 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.298602 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc44j\" (UniqueName: \"kubernetes.io/projected/6df9ad9f-db2b-4693-8962-cb6a68c2c392-kube-api-access-lc44j\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.298792 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df9ad9f-db2b-4693-8962-cb6a68c2c392-config-volume\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.298826 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df9ad9f-db2b-4693-8962-cb6a68c2c392-secret-volume\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.400844 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df9ad9f-db2b-4693-8962-cb6a68c2c392-config-volume\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.400906 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df9ad9f-db2b-4693-8962-cb6a68c2c392-secret-volume\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
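[editor's note] The RemoveStaleState / "Deleted CPUSet assignment" burst above is the kubelet's CPU and memory managers pruning checkpoint entries for containers of pods that no longer exist, done lazily when the next pod (the collect-profiles job) is admitted. The CPU manager checkpoint is a small JSON file under the kubelet root; a sketch that reads its two stable fields, with the path and field names assumed from the default cpu_manager_state layout:

    package main

    import (
        "encoding/json"
        "fmt"
        "os"
    )

    // partial view of the checkpoint; field names assumed from the
    // kubelet's cpu_manager_state file format
    type cpuManagerState struct {
        PolicyName    string `json:"policyName"`
        DefaultCPUSet string `json:"defaultCpuSet"`
    }

    func main() {
        b, err := os.ReadFile("/var/lib/kubelet/cpu_manager_state")
        if err != nil {
            panic(err)
        }
        var st cpuManagerState
        if err := json.Unmarshal(b, &st); err != nil {
            panic(err)
        }
        fmt.Printf("policy=%s defaultCPUSet=%q\n", st.PolicyName, st.DefaultCPUSet)
    }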
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.400962 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc44j\" (UniqueName: \"kubernetes.io/projected/6df9ad9f-db2b-4693-8962-cb6a68c2c392-kube-api-access-lc44j\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.402274 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df9ad9f-db2b-4693-8962-cb6a68c2c392-config-volume\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.415396 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df9ad9f-db2b-4693-8962-cb6a68c2c392-secret-volume\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.423286 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc44j\" (UniqueName: \"kubernetes.io/projected/6df9ad9f-db2b-4693-8962-cb6a68c2c392-kube-api-access-lc44j\") pod \"collect-profiles-29339880-6mcc2\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.534423 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:00 crc kubenswrapper[4689]: I1013 22:00:00.989159 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"]
Oct 13 22:00:01 crc kubenswrapper[4689]: I1013 22:00:01.044651 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2" event={"ID":"6df9ad9f-db2b-4693-8962-cb6a68c2c392","Type":"ContainerStarted","Data":"9aab577ce64bc2716c288558ce249fca884093a28616632196346220c50bfdde"}
Oct 13 22:00:02 crc kubenswrapper[4689]: I1013 22:00:02.054882 4689 generic.go:334] "Generic (PLEG): container finished" podID="6df9ad9f-db2b-4693-8962-cb6a68c2c392" containerID="04225d2fa5390269b55bdc398c8ee70696fcd1cb6193064198168d461c5ea0eb" exitCode=0
Oct 13 22:00:02 crc kubenswrapper[4689]: I1013 22:00:02.055272 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2" event={"ID":"6df9ad9f-db2b-4693-8962-cb6a68c2c392","Type":"ContainerDied","Data":"04225d2fa5390269b55bdc398c8ee70696fcd1cb6193064198168d461c5ea0eb"}
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.431073 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.567405 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df9ad9f-db2b-4693-8962-cb6a68c2c392-secret-volume\") pod \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") "
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.567642 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc44j\" (UniqueName: \"kubernetes.io/projected/6df9ad9f-db2b-4693-8962-cb6a68c2c392-kube-api-access-lc44j\") pod \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") "
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.567663 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df9ad9f-db2b-4693-8962-cb6a68c2c392-config-volume\") pod \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\" (UID: \"6df9ad9f-db2b-4693-8962-cb6a68c2c392\") "
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.568651 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df9ad9f-db2b-4693-8962-cb6a68c2c392-config-volume" (OuterVolumeSpecName: "config-volume") pod "6df9ad9f-db2b-4693-8962-cb6a68c2c392" (UID: "6df9ad9f-db2b-4693-8962-cb6a68c2c392"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.580741 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6df9ad9f-db2b-4693-8962-cb6a68c2c392-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6df9ad9f-db2b-4693-8962-cb6a68c2c392" (UID: "6df9ad9f-db2b-4693-8962-cb6a68c2c392"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.580806 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6df9ad9f-db2b-4693-8962-cb6a68c2c392-kube-api-access-lc44j" (OuterVolumeSpecName: "kube-api-access-lc44j") pod "6df9ad9f-db2b-4693-8962-cb6a68c2c392" (UID: "6df9ad9f-db2b-4693-8962-cb6a68c2c392"). InnerVolumeSpecName "kube-api-access-lc44j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.669474 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc44j\" (UniqueName: \"kubernetes.io/projected/6df9ad9f-db2b-4693-8962-cb6a68c2c392-kube-api-access-lc44j\") on node \"crc\" DevicePath \"\""
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.669514 4689 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6df9ad9f-db2b-4693-8962-cb6a68c2c392-config-volume\") on node \"crc\" DevicePath \"\""
Oct 13 22:00:03 crc kubenswrapper[4689]: I1013 22:00:03.669524 4689 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6df9ad9f-db2b-4693-8962-cb6a68c2c392-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 13 22:00:04 crc kubenswrapper[4689]: I1013 22:00:04.078483 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2" event={"ID":"6df9ad9f-db2b-4693-8962-cb6a68c2c392","Type":"ContainerDied","Data":"9aab577ce64bc2716c288558ce249fca884093a28616632196346220c50bfdde"}
Oct 13 22:00:04 crc kubenswrapper[4689]: I1013 22:00:04.079059 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9aab577ce64bc2716c288558ce249fca884093a28616632196346220c50bfdde"
Oct 13 22:00:04 crc kubenswrapper[4689]: I1013 22:00:04.079099 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339880-6mcc2"
Oct 13 22:00:04 crc kubenswrapper[4689]: I1013 22:00:04.503085 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"]
Oct 13 22:00:04 crc kubenswrapper[4689]: I1013 22:00:04.509531 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339835-96hsj"]
Oct 13 22:00:05 crc kubenswrapper[4689]: I1013 22:00:05.898958 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c4cf8f8-edb9-4a23-832a-bf70c05cdce6" path="/var/lib/kubelet/pods/8c4cf8f8-edb9-4a23-832a-bf70c05cdce6/volumes"
Oct 13 22:00:29 crc kubenswrapper[4689]: I1013 22:00:29.142485 4689 scope.go:117] "RemoveContainer" containerID="7cb83203217a35475839a6768bd2fbe9681c24d31eea255de13408cbc233ce6a"
Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.155388 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29339881-4jbhr"]
Oct 13 22:01:00 crc kubenswrapper[4689]: E1013 22:01:00.156709 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df9ad9f-db2b-4693-8962-cb6a68c2c392" containerName="collect-profiles"
Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.156734 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df9ad9f-db2b-4693-8962-cb6a68c2c392" containerName="collect-profiles"
Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.157082 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="6df9ad9f-db2b-4693-8962-cb6a68c2c392" containerName="collect-profiles"
Need to start a new one" pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.168568 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29339881-4jbhr"] Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.240165 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tc5b\" (UniqueName: \"kubernetes.io/projected/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-kube-api-access-9tc5b\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.240628 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-combined-ca-bundle\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.240706 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-config-data\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.240868 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-fernet-keys\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.342468 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-fernet-keys\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.342602 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tc5b\" (UniqueName: \"kubernetes.io/projected/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-kube-api-access-9tc5b\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.342698 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-combined-ca-bundle\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.342782 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-config-data\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.349634 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-combined-ca-bundle\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.350340 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-config-data\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.351239 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-fernet-keys\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.357469 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tc5b\" (UniqueName: \"kubernetes.io/projected/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-kube-api-access-9tc5b\") pod \"keystone-cron-29339881-4jbhr\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.481262 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:00 crc kubenswrapper[4689]: I1013 22:01:00.912825 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29339881-4jbhr"] Oct 13 22:01:01 crc kubenswrapper[4689]: I1013 22:01:01.613379 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339881-4jbhr" event={"ID":"d3d13686-7ebb-4ffa-bde4-4c36501a6b21","Type":"ContainerStarted","Data":"12dc5a655be6debee74f0e8dc630102cae5aafe28caf42ce4e59adcaec69cfb7"} Oct 13 22:01:01 crc kubenswrapper[4689]: I1013 22:01:01.613810 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339881-4jbhr" event={"ID":"d3d13686-7ebb-4ffa-bde4-4c36501a6b21","Type":"ContainerStarted","Data":"cb5c04c23d71c64fea999da7e68cb82f9b4457192c00ab5234139b08e3b9e09b"} Oct 13 22:01:01 crc kubenswrapper[4689]: I1013 22:01:01.632288 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29339881-4jbhr" podStartSLOduration=1.632264659 podStartE2EDuration="1.632264659s" podCreationTimestamp="2025-10-13 22:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 22:01:01.631772227 +0000 UTC m=+2978.550017342" watchObservedRunningTime="2025-10-13 22:01:01.632264659 +0000 UTC m=+2978.550509744" Oct 13 22:01:03 crc kubenswrapper[4689]: I1013 22:01:03.632858 4689 generic.go:334] "Generic (PLEG): container finished" podID="d3d13686-7ebb-4ffa-bde4-4c36501a6b21" containerID="12dc5a655be6debee74f0e8dc630102cae5aafe28caf42ce4e59adcaec69cfb7" exitCode=0 Oct 13 22:01:03 crc kubenswrapper[4689]: I1013 22:01:03.633029 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339881-4jbhr" event={"ID":"d3d13686-7ebb-4ffa-bde4-4c36501a6b21","Type":"ContainerDied","Data":"12dc5a655be6debee74f0e8dc630102cae5aafe28caf42ce4e59adcaec69cfb7"} Oct 13 22:01:04 crc kubenswrapper[4689]: 
I1013 22:01:04.974042 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.139508 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-fernet-keys\") pod \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.139957 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-config-data\") pod \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.140031 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-combined-ca-bundle\") pod \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.140107 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tc5b\" (UniqueName: \"kubernetes.io/projected/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-kube-api-access-9tc5b\") pod \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\" (UID: \"d3d13686-7ebb-4ffa-bde4-4c36501a6b21\") " Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.146115 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d3d13686-7ebb-4ffa-bde4-4c36501a6b21" (UID: "d3d13686-7ebb-4ffa-bde4-4c36501a6b21"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.154743 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-kube-api-access-9tc5b" (OuterVolumeSpecName: "kube-api-access-9tc5b") pod "d3d13686-7ebb-4ffa-bde4-4c36501a6b21" (UID: "d3d13686-7ebb-4ffa-bde4-4c36501a6b21"). InnerVolumeSpecName "kube-api-access-9tc5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.168248 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3d13686-7ebb-4ffa-bde4-4c36501a6b21" (UID: "d3d13686-7ebb-4ffa-bde4-4c36501a6b21"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.196797 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-config-data" (OuterVolumeSpecName: "config-data") pod "d3d13686-7ebb-4ffa-bde4-4c36501a6b21" (UID: "d3d13686-7ebb-4ffa-bde4-4c36501a6b21"). InnerVolumeSpecName "config-data". 
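The reconciler_common entries above walk one pod's volumes through the kubelet volume manager's full cycle: VerifyControllerAttachedVolume when the pod is admitted, MountVolume.SetUp before the first container starts, then UnmountVolume.TearDown and "Volume detached" after the pod exits. A minimal Go sketch of that reconcile pattern, assuming a toy two-set model instead of the kubelet's real desired/actual state-of-world caches:

package main

import "fmt"

// Toy version of the mount/unmount reconcile pattern in the
// reconciler_common.go entries above. The real kubelet diffs a "desired
// state of world" against an "actual state of world"; here both are plain
// sets of volume names, which is enough to show the two branches the log
// lines correspond to.
func reconcile(desired, mounted map[string]bool) {
	for v := range desired {
		if !mounted[v] {
			fmt.Printf("MountVolume.SetUp started for %q\n", v) // then "SetUp succeeded"
			mounted[v] = true
		}
	}
	for v := range mounted {
		if !desired[v] {
			fmt.Printf("UnmountVolume.TearDown started for %q\n", v) // then "Volume detached"
			delete(mounted, v)
		}
	}
}

func main() {
	mounted := map[string]bool{}
	podVolumes := map[string]bool{
		"config-data": true, "fernet-keys": true,
		"combined-ca-bundle": true, "kube-api-access-9tc5b": true,
	}
	reconcile(podVolumes, mounted)        // pod admitted: everything mounts
	reconcile(map[string]bool{}, mounted) // pod gone: everything unmounts
}

The real reconciler runs this diff in a loop; the keystone-cron log lines map one-to-one onto the two branches.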
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.242066 4689 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.242101 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tc5b\" (UniqueName: \"kubernetes.io/projected/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-kube-api-access-9tc5b\") on node \"crc\" DevicePath \"\"" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.242115 4689 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.242129 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3d13686-7ebb-4ffa-bde4-4c36501a6b21-config-data\") on node \"crc\" DevicePath \"\"" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.649530 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29339881-4jbhr" event={"ID":"d3d13686-7ebb-4ffa-bde4-4c36501a6b21","Type":"ContainerDied","Data":"cb5c04c23d71c64fea999da7e68cb82f9b4457192c00ab5234139b08e3b9e09b"} Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.649575 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb5c04c23d71c64fea999da7e68cb82f9b4457192c00ab5234139b08e3b9e09b" Oct 13 22:01:05 crc kubenswrapper[4689]: I1013 22:01:05.649610 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29339881-4jbhr" Oct 13 22:02:23 crc kubenswrapper[4689]: I1013 22:02:23.859561 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:02:23 crc kubenswrapper[4689]: I1013 22:02:23.860199 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 22:02:53 crc kubenswrapper[4689]: I1013 22:02:53.858901 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:02:53 crc kubenswrapper[4689]: I1013 22:02:53.859889 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 22:03:23 crc kubenswrapper[4689]: I1013 22:03:23.858847 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: 
Oct 13 22:03:23 crc kubenswrapper[4689]: I1013 22:03:23.859421 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 22:03:23 crc kubenswrapper[4689]: I1013 22:03:23.859478 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm"
Oct 13 22:03:23 crc kubenswrapper[4689]: I1013 22:03:23.860200 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 13 22:03:23 crc kubenswrapper[4689]: I1013 22:03:23.860272 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" gracePeriod=600
Oct 13 22:03:24 crc kubenswrapper[4689]: E1013 22:03:24.005765 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:03:24 crc kubenswrapper[4689]: I1013 22:03:24.994644 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" exitCode=0
Oct 13 22:03:24 crc kubenswrapper[4689]: I1013 22:03:24.994720 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"}
Oct 13 22:03:24 crc kubenswrapper[4689]: I1013 22:03:24.995211 4689 scope.go:117] "RemoveContainer" containerID="5a8d6bd605d3b7ccb12aec891829ef3ded1c4a5a8f46d8f1bca1fa1375f530b3"
Oct 13 22:03:24 crc kubenswrapper[4689]: I1013 22:03:24.995979 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:03:24 crc kubenswrapper[4689]: E1013 22:03:24.996538 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:03:26 crc kubenswrapper[4689]: E1013 22:03:26.162027 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache]"
Oct 13 22:03:36 crc kubenswrapper[4689]: E1013 22:03:36.423244 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache]"
Oct 13 22:03:36 crc kubenswrapper[4689]: I1013 22:03:36.868444 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:03:36 crc kubenswrapper[4689]: E1013 22:03:36.869035 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:03:46 crc kubenswrapper[4689]: E1013 22:03:46.638400 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache]"
Oct 13 22:03:47 crc kubenswrapper[4689]: I1013 22:03:47.867191 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:03:47 crc kubenswrapper[4689]: E1013 22:03:47.867733 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:03:56 crc kubenswrapper[4689]: E1013 22:03:56.888321 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache]"
Oct 13 22:03:58 crc kubenswrapper[4689]: I1013 22:03:58.867944 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:03:58 crc kubenswrapper[4689]: E1013 22:03:58.868534 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:04:04 crc kubenswrapper[4689]: I1013 22:04:04.993925 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2c2gw"]
Oct 13 22:04:04 crc kubenswrapper[4689]: E1013 22:04:04.994858 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3d13686-7ebb-4ffa-bde4-4c36501a6b21" containerName="keystone-cron"
Oct 13 22:04:04 crc kubenswrapper[4689]: I1013 22:04:04.994870 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3d13686-7ebb-4ffa-bde4-4c36501a6b21" containerName="keystone-cron"
Oct 13 22:04:04 crc kubenswrapper[4689]: I1013 22:04:04.995073 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3d13686-7ebb-4ffa-bde4-4c36501a6b21" containerName="keystone-cron"
Oct 13 22:04:04 crc kubenswrapper[4689]: I1013 22:04:04.996384 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2c2gw"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.000615 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2c2gw"] Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.158314 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-catalog-content\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.158715 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx2sg\" (UniqueName: \"kubernetes.io/projected/d82b8e46-35f1-4663-b9b2-291d73783c47-kube-api-access-dx2sg\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.158749 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-utilities\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.259903 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-catalog-content\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.259994 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx2sg\" (UniqueName: \"kubernetes.io/projected/d82b8e46-35f1-4663-b9b2-291d73783c47-kube-api-access-dx2sg\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.260027 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-utilities\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.260498 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-utilities\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.260719 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-catalog-content\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.278786 4689 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dx2sg\" (UniqueName: \"kubernetes.io/projected/d82b8e46-35f1-4663-b9b2-291d73783c47-kube-api-access-dx2sg\") pod \"redhat-marketplace-2c2gw\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.326649 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:05 crc kubenswrapper[4689]: I1013 22:04:05.764712 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2c2gw"] Oct 13 22:04:06 crc kubenswrapper[4689]: I1013 22:04:06.355817 4689 generic.go:334] "Generic (PLEG): container finished" podID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerID="33b179c448b822539e192cb5f1a6efc760e1707f8f45e3df1be3c790dd0b62b4" exitCode=0 Oct 13 22:04:06 crc kubenswrapper[4689]: I1013 22:04:06.355902 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2c2gw" event={"ID":"d82b8e46-35f1-4663-b9b2-291d73783c47","Type":"ContainerDied","Data":"33b179c448b822539e192cb5f1a6efc760e1707f8f45e3df1be3c790dd0b62b4"} Oct 13 22:04:06 crc kubenswrapper[4689]: I1013 22:04:06.356113 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2c2gw" event={"ID":"d82b8e46-35f1-4663-b9b2-291d73783c47","Type":"ContainerStarted","Data":"3bc8d4a49b438d3afb3cf7448a86f59803cfd94c637c7ea4ca313031e84cec8d"} Oct 13 22:04:06 crc kubenswrapper[4689]: I1013 22:04:06.358075 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 22:04:07 crc kubenswrapper[4689]: E1013 22:04:07.131012 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:04:07 crc kubenswrapper[4689]: I1013 22:04:07.366827 4689 generic.go:334] "Generic (PLEG): container finished" podID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerID="ddec4d2d6b2966e402354170be7013bb08fdaa74e2d276cd8e4a2810724c2791" exitCode=0 Oct 13 22:04:07 crc kubenswrapper[4689]: I1013 22:04:07.366868 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2c2gw" event={"ID":"d82b8e46-35f1-4663-b9b2-291d73783c47","Type":"ContainerDied","Data":"ddec4d2d6b2966e402354170be7013bb08fdaa74e2d276cd8e4a2810724c2791"} Oct 13 22:04:08 crc kubenswrapper[4689]: I1013 22:04:08.377828 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2c2gw" event={"ID":"d82b8e46-35f1-4663-b9b2-291d73783c47","Type":"ContainerStarted","Data":"04ffdd64d3347b2a1e479cb254f514a40b2c85482eba6bd249c3ced79309ac85"} Oct 13 22:04:08 crc kubenswrapper[4689]: I1013 22:04:08.394777 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2c2gw" podStartSLOduration=2.968788929 podStartE2EDuration="4.394761454s" 
podCreationTimestamp="2025-10-13 22:04:04 +0000 UTC" firstStartedPulling="2025-10-13 22:04:06.35784556 +0000 UTC m=+3163.276090645" lastFinishedPulling="2025-10-13 22:04:07.783818075 +0000 UTC m=+3164.702063170" observedRunningTime="2025-10-13 22:04:08.392808407 +0000 UTC m=+3165.311053492" watchObservedRunningTime="2025-10-13 22:04:08.394761454 +0000 UTC m=+3165.313006539" Oct 13 22:04:12 crc kubenswrapper[4689]: I1013 22:04:12.867639 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" Oct 13 22:04:12 crc kubenswrapper[4689]: E1013 22:04:12.868500 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:04:15 crc kubenswrapper[4689]: I1013 22:04:15.327558 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:15 crc kubenswrapper[4689]: I1013 22:04:15.327853 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:15 crc kubenswrapper[4689]: I1013 22:04:15.388659 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:15 crc kubenswrapper[4689]: I1013 22:04:15.483704 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:15 crc kubenswrapper[4689]: I1013 22:04:15.623760 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2c2gw"] Oct 13 22:04:17 crc kubenswrapper[4689]: E1013 22:04:17.371668 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:04:17 crc kubenswrapper[4689]: I1013 22:04:17.454820 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2c2gw" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="registry-server" containerID="cri-o://04ffdd64d3347b2a1e479cb254f514a40b2c85482eba6bd249c3ced79309ac85" gracePeriod=2 Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.467650 4689 generic.go:334] "Generic (PLEG): container finished" podID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerID="04ffdd64d3347b2a1e479cb254f514a40b2c85482eba6bd249c3ced79309ac85" exitCode=0 Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.467750 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2c2gw" 
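The pod_startup_latency_tracker line above is internally consistent arithmetic: podStartSLOduration appears to be the end-to-end startup time minus the image-pull window. For redhat-marketplace-2c2gw, podStartE2EDuration is 4.394761454s and pulling ran from 22:04:06.35784556 to 22:04:07.783818075, i.e. 1.425972515s; 4.394761454 - 1.425972515 = 2.968788939, matching the reported podStartSLOduration=2.968788929 up to rounding. For keystone-cron earlier, both pull timestamps are the zero time (the image was already present), so the SLO and E2E durations coincide at 1.632264659s.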
event={"ID":"d82b8e46-35f1-4663-b9b2-291d73783c47","Type":"ContainerDied","Data":"04ffdd64d3347b2a1e479cb254f514a40b2c85482eba6bd249c3ced79309ac85"} Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.468031 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2c2gw" event={"ID":"d82b8e46-35f1-4663-b9b2-291d73783c47","Type":"ContainerDied","Data":"3bc8d4a49b438d3afb3cf7448a86f59803cfd94c637c7ea4ca313031e84cec8d"} Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.468053 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bc8d4a49b438d3afb3cf7448a86f59803cfd94c637c7ea4ca313031e84cec8d" Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.547774 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.726827 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-catalog-content\") pod \"d82b8e46-35f1-4663-b9b2-291d73783c47\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.726904 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dx2sg\" (UniqueName: \"kubernetes.io/projected/d82b8e46-35f1-4663-b9b2-291d73783c47-kube-api-access-dx2sg\") pod \"d82b8e46-35f1-4663-b9b2-291d73783c47\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.726937 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-utilities\") pod \"d82b8e46-35f1-4663-b9b2-291d73783c47\" (UID: \"d82b8e46-35f1-4663-b9b2-291d73783c47\") " Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.727844 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-utilities" (OuterVolumeSpecName: "utilities") pod "d82b8e46-35f1-4663-b9b2-291d73783c47" (UID: "d82b8e46-35f1-4663-b9b2-291d73783c47"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.738241 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d82b8e46-35f1-4663-b9b2-291d73783c47-kube-api-access-dx2sg" (OuterVolumeSpecName: "kube-api-access-dx2sg") pod "d82b8e46-35f1-4663-b9b2-291d73783c47" (UID: "d82b8e46-35f1-4663-b9b2-291d73783c47"). InnerVolumeSpecName "kube-api-access-dx2sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.745159 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d82b8e46-35f1-4663-b9b2-291d73783c47" (UID: "d82b8e46-35f1-4663-b9b2-291d73783c47"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.828186 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.828641 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dx2sg\" (UniqueName: \"kubernetes.io/projected/d82b8e46-35f1-4663-b9b2-291d73783c47-kube-api-access-dx2sg\") on node \"crc\" DevicePath \"\"" Oct 13 22:04:18 crc kubenswrapper[4689]: I1013 22:04:18.828724 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d82b8e46-35f1-4663-b9b2-291d73783c47-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 22:04:19 crc kubenswrapper[4689]: I1013 22:04:19.475838 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2c2gw" Oct 13 22:04:19 crc kubenswrapper[4689]: I1013 22:04:19.508891 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2c2gw"] Oct 13 22:04:19 crc kubenswrapper[4689]: I1013 22:04:19.518637 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2c2gw"] Oct 13 22:04:19 crc kubenswrapper[4689]: I1013 22:04:19.878189 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" path="/var/lib/kubelet/pods/d82b8e46-35f1-4663-b9b2-291d73783c47/volumes" Oct 13 22:04:26 crc kubenswrapper[4689]: I1013 22:04:26.868283 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" Oct 13 22:04:26 crc kubenswrapper[4689]: E1013 22:04:26.869276 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:04:37 crc kubenswrapper[4689]: I1013 22:04:37.868121 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" Oct 13 22:04:37 crc kubenswrapper[4689]: E1013 22:04:37.868861 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:04:52 crc kubenswrapper[4689]: I1013 22:04:52.867443 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" Oct 13 22:04:52 crc kubenswrapper[4689]: E1013 22:04:52.869555 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Oct 13 22:05:06 crc kubenswrapper[4689]: I1013 22:05:06.868163 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:05:06 crc kubenswrapper[4689]: E1013 22:05:06.869512 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:05:19 crc kubenswrapper[4689]: I1013 22:05:19.867398 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:05:19 crc kubenswrapper[4689]: E1013 22:05:19.868066 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:05:31 crc kubenswrapper[4689]: I1013 22:05:31.868232 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:05:31 crc kubenswrapper[4689]: E1013 22:05:31.869141 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:05:44 crc kubenswrapper[4689]: I1013 22:05:44.867435 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:05:44 crc kubenswrapper[4689]: E1013 22:05:44.868088 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:05:55 crc kubenswrapper[4689]: I1013 22:05:55.868449 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:05:55 crc kubenswrapper[4689]: E1013 22:05:55.870328 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:06:06 crc kubenswrapper[4689]: I1013 22:06:06.867044 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:06:06 crc kubenswrapper[4689]: E1013 22:06:06.867789 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:06:18 crc kubenswrapper[4689]: I1013 22:06:18.868309 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:06:18 crc kubenswrapper[4689]: E1013 22:06:18.869105 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:06:29 crc kubenswrapper[4689]: I1013 22:06:29.867931 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:06:29 crc kubenswrapper[4689]: E1013 22:06:29.868694 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:06:44 crc kubenswrapper[4689]: I1013 22:06:44.867620 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:06:44 crc kubenswrapper[4689]: E1013 22:06:44.868307 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:06:59 crc kubenswrapper[4689]: I1013 22:06:59.867688 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:06:59 crc kubenswrapper[4689]: E1013 22:06:59.868487 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:07:14 crc kubenswrapper[4689]: I1013 22:07:14.866870 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:07:14 crc kubenswrapper[4689]: E1013 22:07:14.867672 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:07:26 crc kubenswrapper[4689]: I1013 22:07:26.868065 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:07:26 crc kubenswrapper[4689]: E1013 22:07:26.868758 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:07:39 crc kubenswrapper[4689]: I1013 22:07:39.867343 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:07:39 crc kubenswrapper[4689]: E1013 22:07:39.868166 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:07:53 crc kubenswrapper[4689]: I1013 22:07:53.872902 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:07:53 crc kubenswrapper[4689]: E1013 22:07:53.874767 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.387251 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cxzfq"]
Oct 13 22:07:58 crc kubenswrapper[4689]: E1013 22:07:58.388765 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="extract-content"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.388788 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="extract-content"
Oct 13 22:07:58 crc kubenswrapper[4689]: E1013 22:07:58.388808 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="extract-utilities"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.388820 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="extract-utilities"
Oct 13 22:07:58 crc kubenswrapper[4689]: E1013 22:07:58.388852 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="registry-server"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.388862 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="registry-server"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.389207 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="d82b8e46-35f1-4663-b9b2-291d73783c47" containerName="registry-server"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.391646 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.407072 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cxzfq"]
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.550032 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-catalog-content\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.550120 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-utilities\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.550333 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn2dp\" (UniqueName: \"kubernetes.io/projected/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-kube-api-access-kn2dp\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.651980 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-catalog-content\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.652063 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-utilities\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.652120 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn2dp\" (UniqueName: \"kubernetes.io/projected/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-kube-api-access-kn2dp\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.652633 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-catalog-content\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq"
succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-catalog-content\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.652714 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-utilities\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.681051 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn2dp\" (UniqueName: \"kubernetes.io/projected/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-kube-api-access-kn2dp\") pod \"community-operators-cxzfq\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:07:58 crc kubenswrapper[4689]: I1013 22:07:58.720719 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:07:59 crc kubenswrapper[4689]: I1013 22:07:59.280307 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cxzfq"] Oct 13 22:07:59 crc kubenswrapper[4689]: W1013 22:07:59.284647 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode21bb13a_d3f1_4f2d_8bfb_e576aa903bcb.slice/crio-38b80279a0bdf31ab95e7474d91495e8f84228513e42f3fe4d3c02906336dfb2 WatchSource:0}: Error finding container 38b80279a0bdf31ab95e7474d91495e8f84228513e42f3fe4d3c02906336dfb2: Status 404 returned error can't find the container with id 38b80279a0bdf31ab95e7474d91495e8f84228513e42f3fe4d3c02906336dfb2 Oct 13 22:07:59 crc kubenswrapper[4689]: I1013 22:07:59.523743 4689 generic.go:334] "Generic (PLEG): container finished" podID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerID="2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a" exitCode=0 Oct 13 22:07:59 crc kubenswrapper[4689]: I1013 22:07:59.523791 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cxzfq" event={"ID":"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb","Type":"ContainerDied","Data":"2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a"} Oct 13 22:07:59 crc kubenswrapper[4689]: I1013 22:07:59.524026 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cxzfq" event={"ID":"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb","Type":"ContainerStarted","Data":"38b80279a0bdf31ab95e7474d91495e8f84228513e42f3fe4d3c02906336dfb2"} Oct 13 22:08:00 crc kubenswrapper[4689]: I1013 22:08:00.536444 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cxzfq" event={"ID":"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb","Type":"ContainerStarted","Data":"c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7"} Oct 13 22:08:01 crc kubenswrapper[4689]: I1013 22:08:01.557576 4689 generic.go:334] "Generic (PLEG): container finished" podID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerID="c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7" exitCode=0 Oct 13 22:08:01 crc kubenswrapper[4689]: I1013 22:08:01.557725 4689 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/community-operators-cxzfq" event={"ID":"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb","Type":"ContainerDied","Data":"c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7"} Oct 13 22:08:02 crc kubenswrapper[4689]: I1013 22:08:02.570737 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cxzfq" event={"ID":"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb","Type":"ContainerStarted","Data":"dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b"} Oct 13 22:08:02 crc kubenswrapper[4689]: I1013 22:08:02.591814 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cxzfq" podStartSLOduration=2.162712135 podStartE2EDuration="4.591794886s" podCreationTimestamp="2025-10-13 22:07:58 +0000 UTC" firstStartedPulling="2025-10-13 22:07:59.527202311 +0000 UTC m=+3396.445447406" lastFinishedPulling="2025-10-13 22:08:01.956285062 +0000 UTC m=+3398.874530157" observedRunningTime="2025-10-13 22:08:02.590135606 +0000 UTC m=+3399.508380691" watchObservedRunningTime="2025-10-13 22:08:02.591794886 +0000 UTC m=+3399.510039961" Oct 13 22:08:07 crc kubenswrapper[4689]: I1013 22:08:07.868558 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" Oct 13 22:08:07 crc kubenswrapper[4689]: E1013 22:08:07.869269 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:08:08 crc kubenswrapper[4689]: I1013 22:08:08.721218 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:08:08 crc kubenswrapper[4689]: I1013 22:08:08.721761 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:08:08 crc kubenswrapper[4689]: I1013 22:08:08.793173 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:08:09 crc kubenswrapper[4689]: I1013 22:08:09.715684 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:08:09 crc kubenswrapper[4689]: I1013 22:08:09.777547 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cxzfq"] Oct 13 22:08:11 crc kubenswrapper[4689]: I1013 22:08:11.648539 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cxzfq" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="registry-server" containerID="cri-o://dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b" gracePeriod=2 Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.179321 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cxzfq" Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.310627 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-catalog-content\") pod \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.310686 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-utilities\") pod \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.310746 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kn2dp\" (UniqueName: \"kubernetes.io/projected/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-kube-api-access-kn2dp\") pod \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\" (UID: \"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb\") " Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.311762 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-utilities" (OuterVolumeSpecName: "utilities") pod "e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" (UID: "e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.318839 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-kube-api-access-kn2dp" (OuterVolumeSpecName: "kube-api-access-kn2dp") pod "e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" (UID: "e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb"). InnerVolumeSpecName "kube-api-access-kn2dp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.353200 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" (UID: "e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb"). InnerVolumeSpecName "catalog-content". 
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.413132 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.413167 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.413178 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kn2dp\" (UniqueName: \"kubernetes.io/projected/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb-kube-api-access-kn2dp\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.659907 4689 generic.go:334] "Generic (PLEG): container finished" podID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerID="dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b" exitCode=0
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.659961 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cxzfq" event={"ID":"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb","Type":"ContainerDied","Data":"dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b"}
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.660871 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cxzfq" event={"ID":"e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb","Type":"ContainerDied","Data":"38b80279a0bdf31ab95e7474d91495e8f84228513e42f3fe4d3c02906336dfb2"}
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.660949 4689 scope.go:117] "RemoveContainer" containerID="dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.659989 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cxzfq"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.692528 4689 scope.go:117] "RemoveContainer" containerID="c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.697891 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cxzfq"]
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.706111 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cxzfq"]
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.715827 4689 scope.go:117] "RemoveContainer" containerID="2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.761453 4689 scope.go:117] "RemoveContainer" containerID="dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b"
Oct 13 22:08:12 crc kubenswrapper[4689]: E1013 22:08:12.762038 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b\": container with ID starting with dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b not found: ID does not exist" containerID="dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.762070 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b"} err="failed to get container status \"dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b\": rpc error: code = NotFound desc = could not find container \"dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b\": container with ID starting with dd26b15982c9a8bc73f70fbd8a09538fd1b1f77ff30bb7a3fea6a481a5f0e85b not found: ID does not exist"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.762092 4689 scope.go:117] "RemoveContainer" containerID="c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7"
Oct 13 22:08:12 crc kubenswrapper[4689]: E1013 22:08:12.762312 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7\": container with ID starting with c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7 not found: ID does not exist" containerID="c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.762349 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7"} err="failed to get container status \"c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7\": rpc error: code = NotFound desc = could not find container \"c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7\": container with ID starting with c5c1becf8db7ef5584066bd2c2094f0e5c91f1218ee06e7c26b36994fc0c06b7 not found: ID does not exist"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.762367 4689 scope.go:117] "RemoveContainer" containerID="2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a"
Oct 13 22:08:12 crc kubenswrapper[4689]: E1013 22:08:12.762561 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a\": container with ID starting with 2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a not found: ID does not exist" containerID="2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a"
Oct 13 22:08:12 crc kubenswrapper[4689]: I1013 22:08:12.762582 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a"} err="failed to get container status \"2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a\": rpc error: code = NotFound desc = could not find container \"2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a\": container with ID starting with 2cef5c75d6e16d675defaa2795edb222bd06680639d038979bee1f920142191a not found: ID does not exist"
Oct 13 22:08:13 crc kubenswrapper[4689]: I1013 22:08:13.892412 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" path="/var/lib/kubelet/pods/e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb/volumes"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.840493 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hzcrl"]
Oct 13 22:08:21 crc kubenswrapper[4689]: E1013 22:08:21.842423 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="extract-utilities"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.842452 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="extract-utilities"
Oct 13 22:08:21 crc kubenswrapper[4689]: E1013 22:08:21.842494 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="extract-content"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.842504 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="extract-content"
Oct 13 22:08:21 crc kubenswrapper[4689]: E1013 22:08:21.842533 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="registry-server"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.842542 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="registry-server"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.842861 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e21bb13a-d3f1-4f2d-8bfb-e576aa903bcb" containerName="registry-server"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.844801 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.868553 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:08:21 crc kubenswrapper[4689]: E1013 22:08:21.868894 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.880073 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hzcrl"]
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.929867 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-catalog-content\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.930082 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-utilities\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:21 crc kubenswrapper[4689]: I1013 22:08:21.930161 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4dr8\" (UniqueName: \"kubernetes.io/projected/305a4e8c-90fe-45ba-866f-d165b28b2cea-kube-api-access-t4dr8\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.032094 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-utilities\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.032213 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4dr8\" (UniqueName: \"kubernetes.io/projected/305a4e8c-90fe-45ba-866f-d165b28b2cea-kube-api-access-t4dr8\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.032336 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-catalog-content\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.032645 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-utilities\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.033194 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-catalog-content\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.055322 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4dr8\" (UniqueName: \"kubernetes.io/projected/305a4e8c-90fe-45ba-866f-d165b28b2cea-kube-api-access-t4dr8\") pod \"redhat-operators-hzcrl\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") " pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.165511 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.621825 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hzcrl"]
Oct 13 22:08:22 crc kubenswrapper[4689]: I1013 22:08:22.767354 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hzcrl" event={"ID":"305a4e8c-90fe-45ba-866f-d165b28b2cea","Type":"ContainerStarted","Data":"1389ce486141e2e76fc00116be15b92dc0ed2ce1ea15fdb73e9fd03a1513e17d"}
Oct 13 22:08:23 crc kubenswrapper[4689]: I1013 22:08:23.781337 4689 generic.go:334] "Generic (PLEG): container finished" podID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerID="b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5" exitCode=0
Oct 13 22:08:23 crc kubenswrapper[4689]: I1013 22:08:23.781404 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hzcrl" event={"ID":"305a4e8c-90fe-45ba-866f-d165b28b2cea","Type":"ContainerDied","Data":"b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5"}
Oct 13 22:08:25 crc kubenswrapper[4689]: I1013 22:08:25.803138 4689 generic.go:334] "Generic (PLEG): container finished" podID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerID="2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c" exitCode=0
Oct 13 22:08:25 crc kubenswrapper[4689]: I1013 22:08:25.803251 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hzcrl" event={"ID":"305a4e8c-90fe-45ba-866f-d165b28b2cea","Type":"ContainerDied","Data":"2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c"}
Oct 13 22:08:26 crc kubenswrapper[4689]: I1013 22:08:26.817748 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hzcrl" event={"ID":"305a4e8c-90fe-45ba-866f-d165b28b2cea","Type":"ContainerStarted","Data":"f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1"}
Oct 13 22:08:26 crc kubenswrapper[4689]: I1013 22:08:26.843878 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hzcrl" podStartSLOduration=3.190506079 podStartE2EDuration="5.843853131s" podCreationTimestamp="2025-10-13 22:08:21 +0000 UTC" firstStartedPulling="2025-10-13 22:08:23.783887566 +0000 UTC m=+3420.702132651" lastFinishedPulling="2025-10-13 22:08:26.437234608 +0000 UTC m=+3423.355479703" observedRunningTime="2025-10-13 22:08:26.836834954 +0000 UTC m=+3423.755080049" watchObservedRunningTime="2025-10-13 22:08:26.843853131 +0000 UTC m=+3423.762098226"
Oct 13 22:08:32 crc kubenswrapper[4689]: I1013 22:08:32.166656 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:32 crc kubenswrapper[4689]: I1013 22:08:32.167576 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:32 crc kubenswrapper[4689]: I1013 22:08:32.212897 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:32 crc kubenswrapper[4689]: I1013 22:08:32.925659 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:32 crc kubenswrapper[4689]: I1013 22:08:32.969391 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hzcrl"]
Oct 13 22:08:33 crc kubenswrapper[4689]: I1013 22:08:33.873636 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0"
Oct 13 22:08:34 crc kubenswrapper[4689]: I1013 22:08:34.914349 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"8308c97505954cfd53e34046f29d414baee75c943899bf62fc757653698c4248"}
Oct 13 22:08:34 crc kubenswrapper[4689]: I1013 22:08:34.914777 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hzcrl" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="registry-server" containerID="cri-o://f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1" gracePeriod=2
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.463282 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.603508 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4dr8\" (UniqueName: \"kubernetes.io/projected/305a4e8c-90fe-45ba-866f-d165b28b2cea-kube-api-access-t4dr8\") pod \"305a4e8c-90fe-45ba-866f-d165b28b2cea\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") "
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.603721 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-utilities\") pod \"305a4e8c-90fe-45ba-866f-d165b28b2cea\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") "
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.603793 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-catalog-content\") pod \"305a4e8c-90fe-45ba-866f-d165b28b2cea\" (UID: \"305a4e8c-90fe-45ba-866f-d165b28b2cea\") "
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.604520 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-utilities" (OuterVolumeSpecName: "utilities") pod "305a4e8c-90fe-45ba-866f-d165b28b2cea" (UID: "305a4e8c-90fe-45ba-866f-d165b28b2cea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.612668 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/305a4e8c-90fe-45ba-866f-d165b28b2cea-kube-api-access-t4dr8" (OuterVolumeSpecName: "kube-api-access-t4dr8") pod "305a4e8c-90fe-45ba-866f-d165b28b2cea" (UID: "305a4e8c-90fe-45ba-866f-d165b28b2cea"). InnerVolumeSpecName "kube-api-access-t4dr8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.695273 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "305a4e8c-90fe-45ba-866f-d165b28b2cea" (UID: "305a4e8c-90fe-45ba-866f-d165b28b2cea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.705667 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-utilities\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.705696 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/305a4e8c-90fe-45ba-866f-d165b28b2cea-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.705708 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4dr8\" (UniqueName: \"kubernetes.io/projected/305a4e8c-90fe-45ba-866f-d165b28b2cea-kube-api-access-t4dr8\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.925821 4689 generic.go:334] "Generic (PLEG): container finished" podID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerID="f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1" exitCode=0
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.925867 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hzcrl" event={"ID":"305a4e8c-90fe-45ba-866f-d165b28b2cea","Type":"ContainerDied","Data":"f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1"}
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.925893 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hzcrl" event={"ID":"305a4e8c-90fe-45ba-866f-d165b28b2cea","Type":"ContainerDied","Data":"1389ce486141e2e76fc00116be15b92dc0ed2ce1ea15fdb73e9fd03a1513e17d"}
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.925912 4689 scope.go:117] "RemoveContainer" containerID="f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1"
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.926267 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hzcrl"
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.958693 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hzcrl"]
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.962679 4689 scope.go:117] "RemoveContainer" containerID="2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c"
Oct 13 22:08:35 crc kubenswrapper[4689]: I1013 22:08:35.967496 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hzcrl"]
Oct 13 22:08:36 crc kubenswrapper[4689]: I1013 22:08:36.002987 4689 scope.go:117] "RemoveContainer" containerID="b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5"
Oct 13 22:08:36 crc kubenswrapper[4689]: I1013 22:08:36.046897 4689 scope.go:117] "RemoveContainer" containerID="f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1"
Oct 13 22:08:36 crc kubenswrapper[4689]: E1013 22:08:36.049742 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1\": container with ID starting with f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1 not found: ID does not exist" containerID="f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1"
Oct 13 22:08:36 crc kubenswrapper[4689]: I1013 22:08:36.049789 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1"} err="failed to get container status \"f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1\": rpc error: code = NotFound desc = could not find container \"f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1\": container with ID starting with f7260aab69bf219a1235fe8e2f4bf23177f57f6c049ab7dc6b961f9e13e7a4b1 not found: ID does not exist"
Oct 13 22:08:36 crc kubenswrapper[4689]: I1013 22:08:36.049821 4689 scope.go:117] "RemoveContainer" containerID="2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c"
Oct 13 22:08:36 crc kubenswrapper[4689]: E1013 22:08:36.050459 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c\": container with ID starting with 2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c not found: ID does not exist" containerID="2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c"
Oct 13 22:08:36 crc kubenswrapper[4689]: I1013 22:08:36.050488 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c"} err="failed to get container status \"2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c\": rpc error: code = NotFound desc = could not find container \"2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c\": container with ID starting with 2421608aae116b98b2153f4132e1ca30d8148d1d3992a907b725e1e7f430202c not found: ID does not exist"
Oct 13 22:08:36 crc kubenswrapper[4689]: I1013 22:08:36.050508 4689 scope.go:117] "RemoveContainer" containerID="b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5"
Oct 13 22:08:36 crc kubenswrapper[4689]: E1013 22:08:36.050898 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5\": container with ID starting with b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5 not found: ID does not exist" containerID="b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5"
Oct 13 22:08:36 crc kubenswrapper[4689]: I1013 22:08:36.050933 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5"} err="failed to get container status \"b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5\": rpc error: code = NotFound desc = could not find container \"b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5\": container with ID starting with b1457d2b310e31b9c63b43c8d6de78449501af4ae9c96eabcda562dcfb7ff8f5 not found: ID does not exist"
Oct 13 22:08:37 crc kubenswrapper[4689]: I1013 22:08:37.888078 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" path="/var/lib/kubelet/pods/305a4e8c-90fe-45ba-866f-d165b28b2cea/volumes"
Oct 13 22:08:49 crc kubenswrapper[4689]: I1013 22:08:49.045056 4689 generic.go:334] "Generic (PLEG): container finished" podID="2fd6769f-1acf-441d-8569-13baec5fcf72" containerID="2ea9061742c4120956c5a652f7815f61a5311ab210ea7056b78a84262cf53ac7" exitCode=0
Oct 13 22:08:49 crc kubenswrapper[4689]: I1013 22:08:49.045171 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2fd6769f-1acf-441d-8569-13baec5fcf72","Type":"ContainerDied","Data":"2ea9061742c4120956c5a652f7815f61a5311ab210ea7056b78a84262cf53ac7"}
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.442774 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.479720 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480177 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-config-data\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480278 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ssh-key\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480358 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ca-certs\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480425 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config-secret\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480456 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480505 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6b8t\" (UniqueName: \"kubernetes.io/projected/2fd6769f-1acf-441d-8569-13baec5fcf72-kube-api-access-h6b8t\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480581 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-workdir\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.480663 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-temporary\") pod \"2fd6769f-1acf-441d-8569-13baec5fcf72\" (UID: \"2fd6769f-1acf-441d-8569-13baec5fcf72\") "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.481820 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.486994 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-config-data" (OuterVolumeSpecName: "config-data") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.490382 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fd6769f-1acf-441d-8569-13baec5fcf72-kube-api-access-h6b8t" (OuterVolumeSpecName: "kube-api-access-h6b8t") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "kube-api-access-h6b8t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.490851 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.495762 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.516286 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.524440 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.525778 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.546369 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2fd6769f-1acf-441d-8569-13baec5fcf72" (UID: "2fd6769f-1acf-441d-8569-13baec5fcf72"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582757 4689 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582783 4689 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2fd6769f-1acf-441d-8569-13baec5fcf72-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582798 4689 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582817 4689 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2fd6769f-1acf-441d-8569-13baec5fcf72-config-data\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582833 4689 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582844 4689 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-ca-certs\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582860 4689 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2fd6769f-1acf-441d-8569-13baec5fcf72-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582899 4689 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.582912 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6b8t\" (UniqueName: \"kubernetes.io/projected/2fd6769f-1acf-441d-8569-13baec5fcf72-kube-api-access-h6b8t\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.602942 4689 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Oct 13 22:08:50 crc kubenswrapper[4689]: I1013 22:08:50.684867 4689 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Oct 13 22:08:51 crc kubenswrapper[4689]: I1013 22:08:51.072503 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2fd6769f-1acf-441d-8569-13baec5fcf72","Type":"ContainerDied","Data":"4de41aab0177c9b91cd8456af6d35b84d5df458a9932261968834509ca72402c"}
Oct 13 22:08:51 crc kubenswrapper[4689]: I1013 22:08:51.072567 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4de41aab0177c9b91cd8456af6d35b84d5df458a9932261968834509ca72402c"
Oct 13 22:08:51 crc kubenswrapper[4689]: I1013 22:08:51.072577 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.586752 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 13 22:09:00 crc kubenswrapper[4689]: E1013 22:09:00.587973 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="registry-server"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.587998 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="registry-server"
Oct 13 22:09:00 crc kubenswrapper[4689]: E1013 22:09:00.588031 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="extract-utilities"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.588044 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="extract-utilities"
Oct 13 22:09:00 crc kubenswrapper[4689]: E1013 22:09:00.588079 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="extract-content"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.588092 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="extract-content"
Oct 13 22:09:00 crc kubenswrapper[4689]: E1013 22:09:00.588145 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fd6769f-1acf-441d-8569-13baec5fcf72" containerName="tempest-tests-tempest-tests-runner"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.588158 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fd6769f-1acf-441d-8569-13baec5fcf72" containerName="tempest-tests-tempest-tests-runner"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.588484 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fd6769f-1acf-441d-8569-13baec5fcf72" containerName="tempest-tests-tempest-tests-runner"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.588536 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="305a4e8c-90fe-45ba-866f-d165b28b2cea" containerName="registry-server"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.589738 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.592060 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mb9bl"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.594516 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.749551 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"618c5ade-6473-4bb8-88b8-f92932517f5e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.749713 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqlfd\" (UniqueName: \"kubernetes.io/projected/618c5ade-6473-4bb8-88b8-f92932517f5e-kube-api-access-zqlfd\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"618c5ade-6473-4bb8-88b8-f92932517f5e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.851145 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqlfd\" (UniqueName: \"kubernetes.io/projected/618c5ade-6473-4bb8-88b8-f92932517f5e-kube-api-access-zqlfd\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"618c5ade-6473-4bb8-88b8-f92932517f5e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.851298 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"618c5ade-6473-4bb8-88b8-f92932517f5e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.851796 4689 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"618c5ade-6473-4bb8-88b8-f92932517f5e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.889457 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqlfd\" (UniqueName: \"kubernetes.io/projected/618c5ade-6473-4bb8-88b8-f92932517f5e-kube-api-access-zqlfd\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"618c5ade-6473-4bb8-88b8-f92932517f5e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.891379 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"618c5ade-6473-4bb8-88b8-f92932517f5e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:00 crc kubenswrapper[4689]: I1013 22:09:00.914772 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 13 22:09:01 crc kubenswrapper[4689]: I1013 22:09:01.374927 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 13 22:09:02 crc kubenswrapper[4689]: I1013 22:09:02.166630 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"618c5ade-6473-4bb8-88b8-f92932517f5e","Type":"ContainerStarted","Data":"724434f9bced2e3592372cd14cbbd3d81dafdb84905ab8cec9bc379dd6aacacd"}
Oct 13 22:09:03 crc kubenswrapper[4689]: I1013 22:09:03.181993 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"618c5ade-6473-4bb8-88b8-f92932517f5e","Type":"ContainerStarted","Data":"20544241faa2a3c51fa1b1784a3f8242d1110b8cb3f43afd024434ef80d7596e"}
Oct 13 22:09:03 crc kubenswrapper[4689]: I1013 22:09:03.200869 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.343175374 podStartE2EDuration="3.200845628s" podCreationTimestamp="2025-10-13 22:09:00 +0000 UTC" firstStartedPulling="2025-10-13 22:09:01.382203423 +0000 UTC m=+3458.300448518" lastFinishedPulling="2025-10-13 22:09:02.239873677 +0000 UTC m=+3459.158118772" observedRunningTime="2025-10-13 22:09:03.195978522 +0000 UTC m=+3460.114223617" watchObservedRunningTime="2025-10-13 22:09:03.200845628 +0000 UTC m=+3460.119090743"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.358279 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-47qhw/must-gather-rqm5w"]
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.360812 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.363089 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-47qhw"/"openshift-service-ca.crt"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.363578 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-47qhw"/"default-dockercfg-8tdng"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.363782 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-47qhw"/"kube-root-ca.crt"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.368954 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-47qhw/must-gather-rqm5w"]
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.421086 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlcmm\" (UniqueName: \"kubernetes.io/projected/ee11f646-3a19-441a-abe8-86f5147df7d1-kube-api-access-vlcmm\") pod \"must-gather-rqm5w\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.421830 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ee11f646-3a19-441a-abe8-86f5147df7d1-must-gather-output\") pod \"must-gather-rqm5w\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.524122 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlcmm\" (UniqueName: \"kubernetes.io/projected/ee11f646-3a19-441a-abe8-86f5147df7d1-kube-api-access-vlcmm\") pod \"must-gather-rqm5w\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.524191 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ee11f646-3a19-441a-abe8-86f5147df7d1-must-gather-output\") pod \"must-gather-rqm5w\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.524940 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ee11f646-3a19-441a-abe8-86f5147df7d1-must-gather-output\") pod \"must-gather-rqm5w\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.540950 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlcmm\" (UniqueName: \"kubernetes.io/projected/ee11f646-3a19-441a-abe8-86f5147df7d1-kube-api-access-vlcmm\") pod \"must-gather-rqm5w\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:19 crc kubenswrapper[4689]: I1013 22:09:19.680356 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/must-gather-rqm5w"
Oct 13 22:09:20 crc kubenswrapper[4689]: I1013 22:09:20.154991 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-47qhw/must-gather-rqm5w"]
Oct 13 22:09:20 crc kubenswrapper[4689]: W1013 22:09:20.168815 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee11f646_3a19_441a_abe8_86f5147df7d1.slice/crio-54be4c3cb4f8c200ff3fbee79cf2a35533cfd1d393215df3ff3215325d62d2a7 WatchSource:0}: Error finding container 54be4c3cb4f8c200ff3fbee79cf2a35533cfd1d393215df3ff3215325d62d2a7: Status 404 returned error can't find the container with id 54be4c3cb4f8c200ff3fbee79cf2a35533cfd1d393215df3ff3215325d62d2a7
Oct 13 22:09:20 crc kubenswrapper[4689]: I1013 22:09:20.170763 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 13 22:09:20 crc kubenswrapper[4689]: I1013 22:09:20.347359 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/must-gather-rqm5w" event={"ID":"ee11f646-3a19-441a-abe8-86f5147df7d1","Type":"ContainerStarted","Data":"54be4c3cb4f8c200ff3fbee79cf2a35533cfd1d393215df3ff3215325d62d2a7"}
Oct 13 22:09:24 crc kubenswrapper[4689]: I1013 22:09:24.392254 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/must-gather-rqm5w" event={"ID":"ee11f646-3a19-441a-abe8-86f5147df7d1","Type":"ContainerStarted","Data":"ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2"}
Oct 13 22:09:24 crc kubenswrapper[4689]: I1013 22:09:24.392515 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/must-gather-rqm5w" event={"ID":"ee11f646-3a19-441a-abe8-86f5147df7d1","Type":"ContainerStarted","Data":"150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0"}
Oct 13 22:09:24 crc kubenswrapper[4689]: I1013 22:09:24.407579 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-47qhw/must-gather-rqm5w" podStartSLOduration=1.666710686 podStartE2EDuration="5.407563858s" podCreationTimestamp="2025-10-13 22:09:19 +0000 UTC" firstStartedPulling="2025-10-13 22:09:20.170424574 +0000 UTC m=+3477.088669659" lastFinishedPulling="2025-10-13 22:09:23.911277746 +0000 UTC m=+3480.829522831" observedRunningTime="2025-10-13 22:09:24.405728074 +0000 UTC m=+3481.323973159" watchObservedRunningTime="2025-10-13 22:09:24.407563858 +0000 UTC m=+3481.325808943"
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.700987 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-47qhw/crc-debug-8knnf"]
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.703025 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.768039 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-host\") pod \"crc-debug-8knnf\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") " pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.768411 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78j49\" (UniqueName: \"kubernetes.io/projected/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-kube-api-access-78j49\") pod \"crc-debug-8knnf\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") " pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.870569 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-host\") pod \"crc-debug-8knnf\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") " pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.870665 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-host\") pod \"crc-debug-8knnf\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") " pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.870704 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78j49\" (UniqueName: \"kubernetes.io/projected/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-kube-api-access-78j49\") pod \"crc-debug-8knnf\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") " pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:27 crc kubenswrapper[4689]: I1013 22:09:27.894216 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78j49\" (UniqueName: \"kubernetes.io/projected/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-kube-api-access-78j49\") pod \"crc-debug-8knnf\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") " pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:28 crc kubenswrapper[4689]: I1013 22:09:28.041191 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:09:28 crc kubenswrapper[4689]: W1013 22:09:28.094953 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaccd64fe_6f3f_4b4e_b65d_a4ce16ee5ef2.slice/crio-b5bf6c694520b72b979669529ad6c6179f1b5e432ba5b7592e9f8de3eb27ce2d WatchSource:0}: Error finding container b5bf6c694520b72b979669529ad6c6179f1b5e432ba5b7592e9f8de3eb27ce2d: Status 404 returned error can't find the container with id b5bf6c694520b72b979669529ad6c6179f1b5e432ba5b7592e9f8de3eb27ce2d
Oct 13 22:09:28 crc kubenswrapper[4689]: I1013 22:09:28.435319 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-8knnf" event={"ID":"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2","Type":"ContainerStarted","Data":"b5bf6c694520b72b979669529ad6c6179f1b5e432ba5b7592e9f8de3eb27ce2d"}
Oct 13 22:09:40 crc kubenswrapper[4689]: I1013 22:09:40.542183 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-8knnf" event={"ID":"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2","Type":"ContainerStarted","Data":"0eea3690f65a1d18f0f1e3f36b074cea374c2acf3a45f9c795bfead1f63ccc5f"}
Oct 13 22:09:40 crc kubenswrapper[4689]: I1013 22:09:40.557786 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-47qhw/crc-debug-8knnf" podStartSLOduration=2.312071495 podStartE2EDuration="13.557770632s" podCreationTimestamp="2025-10-13 22:09:27 +0000 UTC" firstStartedPulling="2025-10-13 22:09:28.097487984 +0000 UTC m=+3485.015733069" lastFinishedPulling="2025-10-13 22:09:39.343187121 +0000 UTC m=+3496.261432206" observedRunningTime="2025-10-13 22:09:40.55385203 +0000 UTC m=+3497.472097115" watchObservedRunningTime="2025-10-13 22:09:40.557770632 +0000 UTC m=+3497.476015717"
Oct 13 22:10:21 crc kubenswrapper[4689]: I1013 22:10:21.929845 4689 generic.go:334] "Generic (PLEG): container finished" podID="accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2" containerID="0eea3690f65a1d18f0f1e3f36b074cea374c2acf3a45f9c795bfead1f63ccc5f" exitCode=0
Oct 13 22:10:21 crc kubenswrapper[4689]: I1013 22:10:21.930374 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-8knnf" event={"ID":"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2","Type":"ContainerDied","Data":"0eea3690f65a1d18f0f1e3f36b074cea374c2acf3a45f9c795bfead1f63ccc5f"}
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.040227 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-8knnf"
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.083994 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-47qhw/crc-debug-8knnf"]
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.092331 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-47qhw/crc-debug-8knnf"]
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.180391 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78j49\" (UniqueName: \"kubernetes.io/projected/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-kube-api-access-78j49\") pod \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") "
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.180568 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-host\") pod \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\" (UID: \"accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2\") "
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.180635 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-host" (OuterVolumeSpecName: "host") pod "accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2" (UID: "accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.180941 4689 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-host\") on node \"crc\" DevicePath \"\""
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.186650 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-kube-api-access-78j49" (OuterVolumeSpecName: "kube-api-access-78j49") pod "accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2" (UID: "accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2"). InnerVolumeSpecName "kube-api-access-78j49". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.282395 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78j49\" (UniqueName: \"kubernetes.io/projected/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2-kube-api-access-78j49\") on node \"crc\" DevicePath \"\""
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.900837 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2" path="/var/lib/kubelet/pods/accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2/volumes"
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.949571 4689 scope.go:117] "RemoveContainer" containerID="0eea3690f65a1d18f0f1e3f36b074cea374c2acf3a45f9c795bfead1f63ccc5f"
Oct 13 22:10:23 crc kubenswrapper[4689]: I1013 22:10:23.949725 4689 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-8knnf" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.313971 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-47qhw/crc-debug-f7pcm"] Oct 13 22:10:24 crc kubenswrapper[4689]: E1013 22:10:24.314570 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2" containerName="container-00" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.314609 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2" containerName="container-00" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.314829 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="accd64fe-6f3f-4b4e-b65d-a4ce16ee5ef2" containerName="container-00" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.315486 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.409823 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-host\") pod \"crc-debug-f7pcm\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.410082 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvrxb\" (UniqueName: \"kubernetes.io/projected/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-kube-api-access-qvrxb\") pod \"crc-debug-f7pcm\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.512091 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-host\") pod \"crc-debug-f7pcm\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.512260 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-host\") pod \"crc-debug-f7pcm\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.512510 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvrxb\" (UniqueName: \"kubernetes.io/projected/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-kube-api-access-qvrxb\") pod \"crc-debug-f7pcm\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.531888 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvrxb\" (UniqueName: \"kubernetes.io/projected/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-kube-api-access-qvrxb\") pod \"crc-debug-f7pcm\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.630056 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:24 crc kubenswrapper[4689]: W1013 22:10:24.660117 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0c371e4_b0e4_4e74_a37a_6194513fa2dc.slice/crio-19d5cbd8704575a10c98f2eab480e8505c94811efa18bbde64d2b0efabfaefac WatchSource:0}: Error finding container 19d5cbd8704575a10c98f2eab480e8505c94811efa18bbde64d2b0efabfaefac: Status 404 returned error can't find the container with id 19d5cbd8704575a10c98f2eab480e8505c94811efa18bbde64d2b0efabfaefac Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.814528 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6545bbd774-g576b_839e3e00-f791-4fb8-8df0-677c8e9a0c27/barbican-api-log/0.log" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.862233 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6545bbd774-g576b_839e3e00-f791-4fb8-8df0-677c8e9a0c27/barbican-api/0.log" Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.961948 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" event={"ID":"b0c371e4-b0e4-4e74-a37a-6194513fa2dc","Type":"ContainerStarted","Data":"482cc299786cbf81ce056be5a5d8da92b06ebe40a1d84ed3c7fb7837c22ac609"} Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.962252 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" event={"ID":"b0c371e4-b0e4-4e74-a37a-6194513fa2dc","Type":"ContainerStarted","Data":"19d5cbd8704575a10c98f2eab480e8505c94811efa18bbde64d2b0efabfaefac"} Oct 13 22:10:24 crc kubenswrapper[4689]: I1013 22:10:24.990647 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" podStartSLOduration=0.990624638 podStartE2EDuration="990.624638ms" podCreationTimestamp="2025-10-13 22:10:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 22:10:24.97941979 +0000 UTC m=+3541.897664875" watchObservedRunningTime="2025-10-13 22:10:24.990624638 +0000 UTC m=+3541.908869723" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.049162 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7bb46f45d-mgdw4_94bcd39d-bca7-4d51-9327-aec08e22b60a/barbican-keystone-listener/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.217986 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7bb46f45d-mgdw4_94bcd39d-bca7-4d51-9327-aec08e22b60a/barbican-keystone-listener-log/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.287480 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f6bb4646c-c8vt4_be6e9b94-6d9a-46ae-ae15-5d9516e4ee47/barbican-worker/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.437034 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f6bb4646c-c8vt4_be6e9b94-6d9a-46ae-ae15-5d9516e4ee47/barbican-worker-log/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.581131 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk_1355811c-482d-4b45-b7cb-7e16b64debf6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:25 crc 
kubenswrapper[4689]: I1013 22:10:25.748269 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/ceilometer-central-agent/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.804627 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/ceilometer-notification-agent/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.868845 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/proxy-httpd/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.934087 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/sg-core/0.log" Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.977787 4689 generic.go:334] "Generic (PLEG): container finished" podID="b0c371e4-b0e4-4e74-a37a-6194513fa2dc" containerID="482cc299786cbf81ce056be5a5d8da92b06ebe40a1d84ed3c7fb7837c22ac609" exitCode=0 Oct 13 22:10:25 crc kubenswrapper[4689]: I1013 22:10:25.977839 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" event={"ID":"b0c371e4-b0e4-4e74-a37a-6194513fa2dc","Type":"ContainerDied","Data":"482cc299786cbf81ce056be5a5d8da92b06ebe40a1d84ed3c7fb7837c22ac609"} Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.097631 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e47a212-9a97-447e-97d9-2686a2937a05/cinder-api/0.log" Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.127394 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e47a212-9a97-447e-97d9-2686a2937a05/cinder-api-log/0.log" Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.287699 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d79575d-670f-47b0-83a6-9c2b36f8ffd0/cinder-scheduler/0.log" Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.323962 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d79575d-670f-47b0-83a6-9c2b36f8ffd0/probe/0.log" Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.475726 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-cltfg_a8d4f189-4446-410c-8cfd-b1cf669221db/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.657055 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-4b87s_9d6b52af-e31e-464d-a83b-ce21d37da265/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.751348 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-56hq2_c1cb5a31-9872-40a5-acb9-6755720fe782/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:26 crc kubenswrapper[4689]: I1013 22:10:26.891633 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-vrcxh_0c646c73-577a-42ba-8aa7-39bac477cb15/init/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.113167 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-vrcxh_0c646c73-577a-42ba-8aa7-39bac477cb15/init/0.log" Oct 13 22:10:27 
crc kubenswrapper[4689]: I1013 22:10:27.126663 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.147562 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-vrcxh_0c646c73-577a-42ba-8aa7-39bac477cb15/dnsmasq-dns/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.158640 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-47qhw/crc-debug-f7pcm"] Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.166741 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-47qhw/crc-debug-f7pcm"] Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.254993 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvrxb\" (UniqueName: \"kubernetes.io/projected/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-kube-api-access-qvrxb\") pod \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.255342 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-host\") pod \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\" (UID: \"b0c371e4-b0e4-4e74-a37a-6194513fa2dc\") " Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.255487 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-host" (OuterVolumeSpecName: "host") pod "b0c371e4-b0e4-4e74-a37a-6194513fa2dc" (UID: "b0c371e4-b0e4-4e74-a37a-6194513fa2dc"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.255913 4689 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-host\") on node \"crc\" DevicePath \"\"" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.261928 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-kube-api-access-qvrxb" (OuterVolumeSpecName: "kube-api-access-qvrxb") pod "b0c371e4-b0e4-4e74-a37a-6194513fa2dc" (UID: "b0c371e4-b0e4-4e74-a37a-6194513fa2dc"). InnerVolumeSpecName "kube-api-access-qvrxb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.344874 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-9dztk_f9b26af4-3dba-452d-9d66-715747d10f18/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.357683 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvrxb\" (UniqueName: \"kubernetes.io/projected/b0c371e4-b0e4-4e74-a37a-6194513fa2dc-kube-api-access-qvrxb\") on node \"crc\" DevicePath \"\"" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.382819 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2a6e5dd8-c0bc-49ec-b03a-9971dbd85486/glance-httpd/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.514807 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2a6e5dd8-c0bc-49ec-b03a-9971dbd85486/glance-log/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.580916 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47/glance-httpd/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.690842 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47/glance-log/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.845275 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-56b8966ffb-99krc_d46a395d-e4aa-45cb-85a7-86a43d5d7371/horizon/0.log" Oct 13 22:10:27 crc kubenswrapper[4689]: I1013 22:10:27.878094 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0c371e4-b0e4-4e74-a37a-6194513fa2dc" path="/var/lib/kubelet/pods/b0c371e4-b0e4-4e74-a37a-6194513fa2dc/volumes" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.014127 4689 scope.go:117] "RemoveContainer" containerID="482cc299786cbf81ce056be5a5d8da92b06ebe40a1d84ed3c7fb7837c22ac609" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.014432 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-f7pcm" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.038960 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-48qv8_af61fcd9-0c85-418a-8329-0a0dc4236f35/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.218843 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-56b8966ffb-99krc_d46a395d-e4aa-45cb-85a7-86a43d5d7371/horizon-log/0.log" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.234352 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-pdv7h_194d986e-a55b-472d-880a-789fe09fcac0/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.398607 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-47qhw/crc-debug-qmr86"] Oct 13 22:10:28 crc kubenswrapper[4689]: E1013 22:10:28.399115 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0c371e4-b0e4-4e74-a37a-6194513fa2dc" containerName="container-00" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.399139 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0c371e4-b0e4-4e74-a37a-6194513fa2dc" containerName="container-00" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.399411 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0c371e4-b0e4-4e74-a37a-6194513fa2dc" containerName="container-00" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.400192 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.454311 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6f78847c76-48zjm_bf43c63a-d1f7-492b-9345-d271dd62a7d2/keystone-api/0.log" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.475447 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbsb8\" (UniqueName: \"kubernetes.io/projected/13a96f76-d5ae-4019-8930-a41f21ac9776-kube-api-access-xbsb8\") pod \"crc-debug-qmr86\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.475790 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/13a96f76-d5ae-4019-8930-a41f21ac9776-host\") pod \"crc-debug-qmr86\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.488907 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29339881-4jbhr_d3d13686-7ebb-4ffa-bde4-4c36501a6b21/keystone-cron/0.log" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.578466 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbsb8\" (UniqueName: \"kubernetes.io/projected/13a96f76-d5ae-4019-8930-a41f21ac9776-kube-api-access-xbsb8\") pod \"crc-debug-qmr86\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.578524 4689 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/13a96f76-d5ae-4019-8930-a41f21ac9776-host\") pod \"crc-debug-qmr86\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.578668 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/13a96f76-d5ae-4019-8930-a41f21ac9776-host\") pod \"crc-debug-qmr86\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.596290 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbsb8\" (UniqueName: \"kubernetes.io/projected/13a96f76-d5ae-4019-8930-a41f21ac9776-kube-api-access-xbsb8\") pod \"crc-debug-qmr86\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.607114 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_0c2195ac-449d-47d0-9a1b-b512a0c6b44e/kube-state-metrics/0.log" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.717520 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:28 crc kubenswrapper[4689]: I1013 22:10:28.725755 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c_5c27fced-a27b-4b4f-bc40-cdcb566eb633/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:28 crc kubenswrapper[4689]: W1013 22:10:28.746024 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13a96f76_d5ae_4019_8930_a41f21ac9776.slice/crio-7e87ac78d4204d6e91cf511cdd00949ebb27e6f1057a1baa45f482a24f1d0be4 WatchSource:0}: Error finding container 7e87ac78d4204d6e91cf511cdd00949ebb27e6f1057a1baa45f482a24f1d0be4: Status 404 returned error can't find the container with id 7e87ac78d4204d6e91cf511cdd00949ebb27e6f1057a1baa45f482a24f1d0be4 Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.024300 4689 generic.go:334] "Generic (PLEG): container finished" podID="13a96f76-d5ae-4019-8930-a41f21ac9776" containerID="fec470e339732a3e3fc9cd019b6ca94c19cab6ac5a34496ffe8575d45d8c8fd2" exitCode=0 Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.024361 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-qmr86" event={"ID":"13a96f76-d5ae-4019-8930-a41f21ac9776","Type":"ContainerDied","Data":"fec470e339732a3e3fc9cd019b6ca94c19cab6ac5a34496ffe8575d45d8c8fd2"} Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.024746 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/crc-debug-qmr86" event={"ID":"13a96f76-d5ae-4019-8930-a41f21ac9776","Type":"ContainerStarted","Data":"7e87ac78d4204d6e91cf511cdd00949ebb27e6f1057a1baa45f482a24f1d0be4"} Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.052746 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9b98684c9-9h5ml_4d52e532-8731-4838-9e3b-e316a722a0a6/neutron-httpd/0.log" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.059951 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-9b98684c9-9h5ml_4d52e532-8731-4838-9e3b-e316a722a0a6/neutron-api/0.log" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.067892 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-47qhw/crc-debug-qmr86"] Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.076038 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-47qhw/crc-debug-qmr86"] Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.252827 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz_d7480298-e4a8-4010-a526-9ca1dba08f71/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.438540 4689 scope.go:117] "RemoveContainer" containerID="04ffdd64d3347b2a1e479cb254f514a40b2c85482eba6bd249c3ced79309ac85" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.461749 4689 scope.go:117] "RemoveContainer" containerID="33b179c448b822539e192cb5f1a6efc760e1707f8f45e3df1be3c790dd0b62b4" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.482951 4689 scope.go:117] "RemoveContainer" containerID="ddec4d2d6b2966e402354170be7013bb08fdaa74e2d276cd8e4a2810724c2791" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.743252 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_3d4d915e-c42b-4389-bd38-49fc12bc950a/nova-cell0-conductor-conductor/0.log" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.766487 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4625bed2-2e08-4399-a0a8-fcb62b4239bc/nova-api-log/0.log" Oct 13 22:10:29 crc kubenswrapper[4689]: I1013 22:10:29.898690 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4625bed2-2e08-4399-a0a8-fcb62b4239bc/nova-api-api/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.029826 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_573c1817-260c-43b0-a892-f393e2d4ba07/nova-cell1-conductor-conductor/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.115632 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_7b0b0a0a-623c-42db-abb2-50a50d924793/nova-cell1-novncproxy-novncproxy/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.160179 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.313827 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/13a96f76-d5ae-4019-8930-a41f21ac9776-host\") pod \"13a96f76-d5ae-4019-8930-a41f21ac9776\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.313977 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbsb8\" (UniqueName: \"kubernetes.io/projected/13a96f76-d5ae-4019-8930-a41f21ac9776-kube-api-access-xbsb8\") pod \"13a96f76-d5ae-4019-8930-a41f21ac9776\" (UID: \"13a96f76-d5ae-4019-8930-a41f21ac9776\") " Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.315094 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/13a96f76-d5ae-4019-8930-a41f21ac9776-host" (OuterVolumeSpecName: "host") pod "13a96f76-d5ae-4019-8930-a41f21ac9776" (UID: "13a96f76-d5ae-4019-8930-a41f21ac9776"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.321059 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13a96f76-d5ae-4019-8930-a41f21ac9776-kube-api-access-xbsb8" (OuterVolumeSpecName: "kube-api-access-xbsb8") pod "13a96f76-d5ae-4019-8930-a41f21ac9776" (UID: "13a96f76-d5ae-4019-8930-a41f21ac9776"). InnerVolumeSpecName "kube-api-access-xbsb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.326801 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-jqvm9_88053993-c10c-49d4-b69a-82c745001999/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.423738 4689 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/13a96f76-d5ae-4019-8930-a41f21ac9776-host\") on node \"crc\" DevicePath \"\"" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.423989 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbsb8\" (UniqueName: \"kubernetes.io/projected/13a96f76-d5ae-4019-8930-a41f21ac9776-kube-api-access-xbsb8\") on node \"crc\" DevicePath \"\"" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.492444 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_dd6f475e-e9b6-421d-9897-1b5a8a748a2a/nova-metadata-log/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.698190 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_06de4aaa-8949-449e-bb2f-65f4cffa4954/nova-scheduler-scheduler/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.786952 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4fc44e1c-da65-48c1-ad48-8b41c9bf4391/mysql-bootstrap/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.990565 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4fc44e1c-da65-48c1-ad48-8b41c9bf4391/galera/0.log" Oct 13 22:10:30 crc kubenswrapper[4689]: I1013 22:10:30.991511 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4fc44e1c-da65-48c1-ad48-8b41c9bf4391/mysql-bootstrap/0.log" Oct 13 
22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.063707 4689 scope.go:117] "RemoveContainer" containerID="fec470e339732a3e3fc9cd019b6ca94c19cab6ac5a34496ffe8575d45d8c8fd2" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.063877 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-47qhw/crc-debug-qmr86" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.196943 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_241dd2d8-e2a2-4653-bfc9-24255216fad4/mysql-bootstrap/0.log" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.383981 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_241dd2d8-e2a2-4653-bfc9-24255216fad4/mysql-bootstrap/0.log" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.450717 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_241dd2d8-e2a2-4653-bfc9-24255216fad4/galera/0.log" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.559042 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_10f0cb83-9cb9-48d1-8b9e-2217c48790d9/openstackclient/0.log" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.620328 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_dd6f475e-e9b6-421d-9897-1b5a8a748a2a/nova-metadata-metadata/0.log" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.653337 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8t9jt_596fffc8-5b10-4da9-950c-ac58fafd2eb2/ovn-controller/0.log" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.810309 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-x69m8_56a091d6-e531-4956-b5aa-15f43a9c1038/openstack-network-exporter/0.log" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.877674 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13a96f76-d5ae-4019-8930-a41f21ac9776" path="/var/lib/kubelet/pods/13a96f76-d5ae-4019-8930-a41f21ac9776/volumes" Oct 13 22:10:31 crc kubenswrapper[4689]: I1013 22:10:31.973742 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovsdb-server-init/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.151497 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovsdb-server/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.179173 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovs-vswitchd/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.179715 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovsdb-server-init/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.380946 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-vjk7j_0185d029-cb9b-4438-a72a-6616759e267e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.403305 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3b50077e-96c6-4908-b3bd-5efa65b83fff/openstack-network-exporter/0.log" Oct 13 22:10:32 crc 
kubenswrapper[4689]: I1013 22:10:32.439689 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3b50077e-96c6-4908-b3bd-5efa65b83fff/ovn-northd/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.619609 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8/openstack-network-exporter/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.868560 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8/ovsdbserver-nb/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.939966 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d13e0078-efa1-47aa-86f7-c7e19e2283af/openstack-network-exporter/0.log" Oct 13 22:10:32 crc kubenswrapper[4689]: I1013 22:10:32.955695 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d13e0078-efa1-47aa-86f7-c7e19e2283af/ovsdbserver-sb/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.110194 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d89fff484-q9fvk_3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5/placement-api/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.211886 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d89fff484-q9fvk_3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5/placement-log/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.338249 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4448f3de-e179-4a5c-8a6d-dd16b725bb0c/setup-container/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.487254 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4448f3de-e179-4a5c-8a6d-dd16b725bb0c/rabbitmq/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.532135 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4448f3de-e179-4a5c-8a6d-dd16b725bb0c/setup-container/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.660282 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_de9fccf5-fe48-498b-a6db-15e734aa9e61/setup-container/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.827144 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_de9fccf5-fe48-498b-a6db-15e734aa9e61/setup-container/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.830507 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_de9fccf5-fe48-498b-a6db-15e734aa9e61/rabbitmq/0.log" Oct 13 22:10:33 crc kubenswrapper[4689]: I1013 22:10:33.900702 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv_024b2226-0636-4d0c-8225-53b2e5ad7050/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.022049 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-65d9m_e6f26597-49c3-41a5-8352-cef0d439fd5c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.147107 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q_44571d1c-f8f4-442a-ac47-51d05df37bfc/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.327194 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-wdbxk_5d79295b-e957-48d5-b56e-d84c50ca7250/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.355604 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-dnqdr_1d1c238b-38b4-471f-a55b-706b93036367/ssh-known-hosts-edpm-deployment/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.612677 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f54846cc7-sc4qr_bae630fb-d96c-45df-abb1-d7913a06d4e6/proxy-server/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.736832 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f54846cc7-sc4qr_bae630fb-d96c-45df-abb1-d7913a06d4e6/proxy-httpd/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.766269 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-8rq2n_0817e909-9d71-4ddd-b3e7-49e41383b1da/swift-ring-rebalance/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.859322 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-auditor/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.933901 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-reaper/0.log" Oct 13 22:10:34 crc kubenswrapper[4689]: I1013 22:10:34.999107 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-replicator/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.010280 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-server/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.078508 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-auditor/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.182721 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-replicator/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.202968 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-server/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.229388 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-updater/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.361382 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-auditor/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.369418 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-expirer/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: 
I1013 22:10:35.381784 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-replicator/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.474302 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-server/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.561820 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/rsync/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.565436 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/swift-recon-cron/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.567018 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-updater/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.835624 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-v8s47_a253b97c-0119-461e-bf69-7dfe5bb90e7f/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:35 crc kubenswrapper[4689]: I1013 22:10:35.902704 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2fd6769f-1acf-441d-8569-13baec5fcf72/tempest-tests-tempest-tests-runner/0.log" Oct 13 22:10:36 crc kubenswrapper[4689]: I1013 22:10:36.014452 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_618c5ade-6473-4bb8-88b8-f92932517f5e/test-operator-logs-container/0.log" Oct 13 22:10:36 crc kubenswrapper[4689]: I1013 22:10:36.132061 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n_aed5fbf9-103b-48fb-b982-61a445ff7f09/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:10:44 crc kubenswrapper[4689]: I1013 22:10:44.907188 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_83675f56-8efb-4eb1-b6e5-65dde48c3ee4/memcached/0.log" Oct 13 22:10:53 crc kubenswrapper[4689]: I1013 22:10:53.859401 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:10:53 crc kubenswrapper[4689]: I1013 22:10:53.860049 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.092348 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/util/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.223870 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/util/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.273743 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/pull/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.284573 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/pull/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.440150 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/util/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.444697 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/pull/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.492371 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/extract/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.625046 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-pvsmw_5f5620d8-6856-4b27-b74a-208edc1ec0d7/kube-rbac-proxy/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.661188 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-w28f4_574e8237-5b30-4af8-b93f-449d9ec98793/kube-rbac-proxy/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.677012 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-pvsmw_5f5620d8-6856-4b27-b74a-208edc1ec0d7/manager/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.849138 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-w28f4_574e8237-5b30-4af8-b93f-449d9ec98793/manager/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.872084 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ltx5l_3bca3670-4880-4598-abbd-8ed51e351c5a/kube-rbac-proxy/0.log" Oct 13 22:10:58 crc kubenswrapper[4689]: I1013 22:10:58.904820 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ltx5l_3bca3670-4880-4598-abbd-8ed51e351c5a/manager/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.047090 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-zm9b4_2ed371e4-bae8-4320-9b6b-e28103137aee/kube-rbac-proxy/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.095727 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-zm9b4_2ed371e4-bae8-4320-9b6b-e28103137aee/manager/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 
22:10:59.224257 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-8lb6r_980922e5-08ec-418a-b207-f463195cc6da/kube-rbac-proxy/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.256371 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-8lb6r_980922e5-08ec-418a-b207-f463195cc6da/manager/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.295715 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-gg4tl_97a56885-e550-415b-95be-3f61e0ac38e5/kube-rbac-proxy/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.392059 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-gg4tl_97a56885-e550-415b-95be-3f61e0ac38e5/manager/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.427782 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-v8wsd_ff2d1098-a378-4314-8662-1dfb98c56aae/kube-rbac-proxy/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.585333 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-mrv92_4e3b3f49-bb44-4375-9bab-527a5e0e57a5/kube-rbac-proxy/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.615000 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-v8wsd_ff2d1098-a378-4314-8662-1dfb98c56aae/manager/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.666006 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-mrv92_4e3b3f49-bb44-4375-9bab-527a5e0e57a5/manager/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.846169 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-7zs79_de26ce24-8f8f-42e6-bd80-5331eb11f6b1/kube-rbac-proxy/0.log" Oct 13 22:10:59 crc kubenswrapper[4689]: I1013 22:10:59.943721 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-7zs79_de26ce24-8f8f-42e6-bd80-5331eb11f6b1/manager/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.023190 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v664m_f3d70a25-802f-4d17-a250-3b76584ff7dc/manager/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.043449 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v664m_f3d70a25-802f-4d17-a250-3b76584ff7dc/kube-rbac-proxy/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.132159 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-qjpss_87d04908-37f4-42ab-8328-893b4e255767/kube-rbac-proxy/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.200696 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-qjpss_87d04908-37f4-42ab-8328-893b4e255767/manager/0.log" Oct 13 22:11:00 crc 
kubenswrapper[4689]: I1013 22:11:00.297367 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-jlgcf_5dc35208-04aa-4df8-af17-6ce8ad80199f/kube-rbac-proxy/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.359538 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-jlgcf_5dc35208-04aa-4df8-af17-6ce8ad80199f/manager/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.477793 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-xpqj5_86e5e806-711e-4a41-9c65-0b121d0228e6/kube-rbac-proxy/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.588151 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-xpqj5_86e5e806-711e-4a41-9c65-0b121d0228e6/manager/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.611113 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-kd8d5_bc1916e6-51d4-4ca9-b8a2-8be1659426a2/kube-rbac-proxy/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.652693 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-kd8d5_bc1916e6-51d4-4ca9-b8a2-8be1659426a2/manager/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.786185 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7_599721f1-ec3e-4a83-b769-db5440b2f260/kube-rbac-proxy/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.795095 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7_599721f1-ec3e-4a83-b769-db5440b2f260/manager/0.log" Oct 13 22:11:00 crc kubenswrapper[4689]: I1013 22:11:00.979319 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fcd588594-tnfjj_654be83b-acf2-4c39-b753-8f4cc7750052/kube-rbac-proxy/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.038381 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5555666847-lgpdl_6a94802f-3575-410f-8d65-f1d11165a10e/kube-rbac-proxy/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.284507 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-wv7t5_c2f0840b-8103-4f7a-8698-3fd60e779a59/registry-server/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.376704 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5555666847-lgpdl_6a94802f-3575-410f-8d65-f1d11165a10e/operator/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.474422 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-f2kxw_d34fac28-ebdd-4c77-ad9d-995611ee01d4/kube-rbac-proxy/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.656207 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-f2kxw_d34fac28-ebdd-4c77-ad9d-995611ee01d4/manager/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.768729 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-j6wsf_261f1cfd-d8a7-4dea-baa7-3feb8f67813a/kube-rbac-proxy/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.770990 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-j6wsf_261f1cfd-d8a7-4dea-baa7-3feb8f67813a/manager/0.log" Oct 13 22:11:01 crc kubenswrapper[4689]: I1013 22:11:01.975261 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-2g8g7_3c9cfbce-22ae-4c0d-9b73-513bf285b4a0/kube-rbac-proxy/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.045643 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl_04a373fa-1962-4bdc-8e26-53d557df6be3/operator/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.084728 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fcd588594-tnfjj_654be83b-acf2-4c39-b753-8f4cc7750052/manager/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.180375 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-n52xn_6c0d5f43-6334-41be-bb4f-9d538d40004a/kube-rbac-proxy/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.197810 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-2g8g7_3c9cfbce-22ae-4c0d-9b73-513bf285b4a0/manager/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.269415 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-n52xn_6c0d5f43-6334-41be-bb4f-9d538d40004a/manager/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.418869 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-fcfzv_d9a167f4-4f3c-44d9-9e18-7fdf79273d12/kube-rbac-proxy/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.426222 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-fcfzv_d9a167f4-4f3c-44d9-9e18-7fdf79273d12/manager/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.474455 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-t5zxf_61a8f77a-a34b-4e04-b508-fc0fb8e7ede7/kube-rbac-proxy/0.log" Oct 13 22:11:02 crc kubenswrapper[4689]: I1013 22:11:02.527772 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-t5zxf_61a8f77a-a34b-4e04-b508-fc0fb8e7ede7/manager/0.log" Oct 13 22:11:16 crc kubenswrapper[4689]: I1013 22:11:16.468461 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-nmkts_1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54/control-plane-machine-set-operator/0.log" Oct 13 22:11:16 crc kubenswrapper[4689]: I1013 22:11:16.656321 4689 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fdwnr_202442b7-241e-44ee-b24f-0eac63864890/kube-rbac-proxy/0.log" Oct 13 22:11:16 crc kubenswrapper[4689]: I1013 22:11:16.708806 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fdwnr_202442b7-241e-44ee-b24f-0eac63864890/machine-api-operator/0.log" Oct 13 22:11:23 crc kubenswrapper[4689]: I1013 22:11:23.858958 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:11:23 crc kubenswrapper[4689]: I1013 22:11:23.859613 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 22:11:27 crc kubenswrapper[4689]: I1013 22:11:27.715228 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-g6vgs_8e5b1294-2785-4023-857f-e404eaed07fb/cert-manager-controller/0.log" Oct 13 22:11:27 crc kubenswrapper[4689]: I1013 22:11:27.803507 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-ndlw8_afb8356c-40a3-4270-bbe2-644b8b14482f/cert-manager-cainjector/0.log" Oct 13 22:11:27 crc kubenswrapper[4689]: I1013 22:11:27.901679 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-52xlv_693e1ab8-5677-4bdd-bb02-b8540de9513a/cert-manager-webhook/0.log" Oct 13 22:11:38 crc kubenswrapper[4689]: I1013 22:11:38.694831 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-47vrf_15bec576-7113-4f6f-8f5a-ed95b3e01608/nmstate-console-plugin/0.log" Oct 13 22:11:39 crc kubenswrapper[4689]: I1013 22:11:39.085011 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-9nfcm_63209f73-d6db-4f5e-9863-37c7d8555f1d/nmstate-handler/0.log" Oct 13 22:11:39 crc kubenswrapper[4689]: I1013 22:11:39.189865 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-tgngb_a76e7989-6bed-472b-8a4f-53227f485adb/nmstate-metrics/0.log" Oct 13 22:11:39 crc kubenswrapper[4689]: I1013 22:11:39.193705 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-tgngb_a76e7989-6bed-472b-8a4f-53227f485adb/kube-rbac-proxy/0.log" Oct 13 22:11:39 crc kubenswrapper[4689]: I1013 22:11:39.320878 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-bmmp2_189fae32-9490-4991-b5c0-2ba0de67d337/nmstate-operator/0.log" Oct 13 22:11:39 crc kubenswrapper[4689]: I1013 22:11:39.432367 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-v7tfb_2f1f259b-9d4c-469c-b336-0f7c4fdac5be/nmstate-webhook/0.log" Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.431294 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j2gsc"] Oct 13 22:11:51 crc kubenswrapper[4689]: 
Oct 13 22:11:51 crc kubenswrapper[4689]: E1013 22:11:51.432225 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a96f76-d5ae-4019-8930-a41f21ac9776" containerName="container-00"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.432245 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a96f76-d5ae-4019-8930-a41f21ac9776" containerName="container-00"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.432562 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a96f76-d5ae-4019-8930-a41f21ac9776" containerName="container-00"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.434058 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j2gsc"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.450969 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j2gsc"]
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.556258 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-utilities\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.556686 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-catalog-content\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.556785 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96q2v\" (UniqueName: \"kubernetes.io/projected/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-kube-api-access-96q2v\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.658289 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-catalog-content\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.658355 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96q2v\" (UniqueName: \"kubernetes.io/projected/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-kube-api-access-96q2v\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc"
Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.658834 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-utilities\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc"
\"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-catalog-content\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.659156 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-utilities\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.677605 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96q2v\" (UniqueName: \"kubernetes.io/projected/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-kube-api-access-96q2v\") pod \"certified-operators-j2gsc\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:11:51 crc kubenswrapper[4689]: I1013 22:11:51.756116 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:11:52 crc kubenswrapper[4689]: I1013 22:11:52.284122 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j2gsc"] Oct 13 22:11:52 crc kubenswrapper[4689]: I1013 22:11:52.771199 4689 generic.go:334] "Generic (PLEG): container finished" podID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerID="77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c" exitCode=0 Oct 13 22:11:52 crc kubenswrapper[4689]: I1013 22:11:52.771242 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j2gsc" event={"ID":"9cfe7e8e-582d-4e20-ae6a-74a359582cb5","Type":"ContainerDied","Data":"77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c"} Oct 13 22:11:52 crc kubenswrapper[4689]: I1013 22:11:52.771516 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j2gsc" event={"ID":"9cfe7e8e-582d-4e20-ae6a-74a359582cb5","Type":"ContainerStarted","Data":"e54e1cf9c9b570dbc4ba5163b8b63e7344e62ef65a685f32bed6fb3caec158de"} Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.690688 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fzzmz_692201d0-1473-499e-b9e6-2d35e6c72032/kube-rbac-proxy/0.log" Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.785042 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j2gsc" event={"ID":"9cfe7e8e-582d-4e20-ae6a-74a359582cb5","Type":"ContainerStarted","Data":"8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001"} Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.858571 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.858811 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.858924 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.859572 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fzzmz_692201d0-1473-499e-b9e6-2d35e6c72032/controller/0.log" Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.859835 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8308c97505954cfd53e34046f29d414baee75c943899bf62fc757653698c4248"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.859945 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://8308c97505954cfd53e34046f29d414baee75c943899bf62fc757653698c4248" gracePeriod=600 Oct 13 22:11:53 crc kubenswrapper[4689]: I1013 22:11:53.943228 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.110534 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.139565 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.146732 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.148824 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.397547 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.400138 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.400325 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.402485 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.582427 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.589562 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.626988 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/controller/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.636046 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.796014 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="8308c97505954cfd53e34046f29d414baee75c943899bf62fc757653698c4248" exitCode=0 Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.796100 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"8308c97505954cfd53e34046f29d414baee75c943899bf62fc757653698c4248"} Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.796428 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d"} Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.796447 4689 scope.go:117] "RemoveContainer" containerID="fd44a69715bcd9c01306c377b31acee5343330d5e2d3a2b6fe6363dee4c643e0" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.799696 4689 generic.go:334] "Generic (PLEG): container finished" podID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerID="8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001" exitCode=0 Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.799732 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j2gsc" event={"ID":"9cfe7e8e-582d-4e20-ae6a-74a359582cb5","Type":"ContainerDied","Data":"8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001"} Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.822506 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/kube-rbac-proxy/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.861416 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/frr-metrics/0.log" Oct 13 22:11:54 crc kubenswrapper[4689]: I1013 22:11:54.862200 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/kube-rbac-proxy-frr/0.log" Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.026879 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/reloader/0.log" Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.078173 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-bqf92_13456adb-0ae6-4db3-a924-dabf915a24aa/frr-k8s-webhook-server/0.log" Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.322285 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5dd59d54d9-fw8tw_61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1/manager/0.log" Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.461336 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5776bf7669-g6lbn_58b9bc04-cebe-4c96-9fdc-14fd4a71f45e/webhook-server/0.log" Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.571812 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z2p9k_77872557-bf06-47e5-b7cb-0101ddd79f56/kube-rbac-proxy/0.log" Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.857527 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j2gsc" event={"ID":"9cfe7e8e-582d-4e20-ae6a-74a359582cb5","Type":"ContainerStarted","Data":"cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75"} Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.871336 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/frr/0.log" Oct 13 22:11:55 crc kubenswrapper[4689]: I1013 22:11:55.878428 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j2gsc" podStartSLOduration=2.444697015 podStartE2EDuration="4.878412586s" podCreationTimestamp="2025-10-13 22:11:51 +0000 UTC" firstStartedPulling="2025-10-13 22:11:52.772739751 +0000 UTC m=+3629.690984836" lastFinishedPulling="2025-10-13 22:11:55.206455322 +0000 UTC m=+3632.124700407" observedRunningTime="2025-10-13 22:11:55.877536015 +0000 UTC m=+3632.795781100" watchObservedRunningTime="2025-10-13 22:11:55.878412586 +0000 UTC m=+3632.796657671" Oct 13 22:11:56 crc kubenswrapper[4689]: I1013 22:11:56.103325 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z2p9k_77872557-bf06-47e5-b7cb-0101ddd79f56/speaker/0.log" Oct 13 22:12:01 crc kubenswrapper[4689]: I1013 22:12:01.756511 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:12:01 crc kubenswrapper[4689]: I1013 22:12:01.757072 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:12:01 crc kubenswrapper[4689]: I1013 22:12:01.799956 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:12:01 crc kubenswrapper[4689]: I1013 22:12:01.952335 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:12:02 crc kubenswrapper[4689]: I1013 22:12:02.036966 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j2gsc"] Oct 13 22:12:03 crc kubenswrapper[4689]: I1013 22:12:03.929205 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j2gsc" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="registry-server" containerID="cri-o://cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75" gracePeriod=2 Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.381331 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.496485 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-utilities\") pod \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.496572 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96q2v\" (UniqueName: \"kubernetes.io/projected/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-kube-api-access-96q2v\") pod \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.496870 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-catalog-content\") pod \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\" (UID: \"9cfe7e8e-582d-4e20-ae6a-74a359582cb5\") " Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.497565 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-utilities" (OuterVolumeSpecName: "utilities") pod "9cfe7e8e-582d-4e20-ae6a-74a359582cb5" (UID: "9cfe7e8e-582d-4e20-ae6a-74a359582cb5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.503301 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-kube-api-access-96q2v" (OuterVolumeSpecName: "kube-api-access-96q2v") pod "9cfe7e8e-582d-4e20-ae6a-74a359582cb5" (UID: "9cfe7e8e-582d-4e20-ae6a-74a359582cb5"). InnerVolumeSpecName "kube-api-access-96q2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.558109 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cfe7e8e-582d-4e20-ae6a-74a359582cb5" (UID: "9cfe7e8e-582d-4e20-ae6a-74a359582cb5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.599341 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.599383 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.599395 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96q2v\" (UniqueName: \"kubernetes.io/projected/9cfe7e8e-582d-4e20-ae6a-74a359582cb5-kube-api-access-96q2v\") on node \"crc\" DevicePath \"\"" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.940857 4689 generic.go:334] "Generic (PLEG): container finished" podID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerID="cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75" exitCode=0 Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.940922 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j2gsc" event={"ID":"9cfe7e8e-582d-4e20-ae6a-74a359582cb5","Type":"ContainerDied","Data":"cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75"} Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.940949 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j2gsc" event={"ID":"9cfe7e8e-582d-4e20-ae6a-74a359582cb5","Type":"ContainerDied","Data":"e54e1cf9c9b570dbc4ba5163b8b63e7344e62ef65a685f32bed6fb3caec158de"} Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.940963 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j2gsc" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.940981 4689 scope.go:117] "RemoveContainer" containerID="cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.964138 4689 scope.go:117] "RemoveContainer" containerID="8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001" Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.978728 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j2gsc"] Oct 13 22:12:04 crc kubenswrapper[4689]: I1013 22:12:04.985881 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j2gsc"] Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.004174 4689 scope.go:117] "RemoveContainer" containerID="77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c" Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.034045 4689 scope.go:117] "RemoveContainer" containerID="cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75" Oct 13 22:12:05 crc kubenswrapper[4689]: E1013 22:12:05.034342 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75\": container with ID starting with cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75 not found: ID does not exist" containerID="cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75" Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.034384 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75"} err="failed to get container status \"cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75\": rpc error: code = NotFound desc = could not find container \"cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75\": container with ID starting with cc9c341608e1e8c7304950aed2c77e530c7db358a4e3fafe245ec27bfff3ab75 not found: ID does not exist" Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.034403 4689 scope.go:117] "RemoveContainer" containerID="8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001" Oct 13 22:12:05 crc kubenswrapper[4689]: E1013 22:12:05.034898 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001\": container with ID starting with 8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001 not found: ID does not exist" containerID="8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001" Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.034953 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001"} err="failed to get container status \"8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001\": rpc error: code = NotFound desc = could not find container \"8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001\": container with ID starting with 8146cd31b579e6495556e8e37ae6a9f0f6659fcb788f460ebce5e2b1255a2001 not found: ID does not exist" Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.034972 4689 scope.go:117] "RemoveContainer" 
containerID="77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c" Oct 13 22:12:05 crc kubenswrapper[4689]: E1013 22:12:05.035287 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c\": container with ID starting with 77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c not found: ID does not exist" containerID="77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c" Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.035325 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c"} err="failed to get container status \"77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c\": rpc error: code = NotFound desc = could not find container \"77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c\": container with ID starting with 77af3c50b0afdfbb7c92496c0a4c838a459efdd722c9fe04a173f06c3153d50c not found: ID does not exist" Oct 13 22:12:05 crc kubenswrapper[4689]: I1013 22:12:05.876497 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" path="/var/lib/kubelet/pods/9cfe7e8e-582d-4e20-ae6a-74a359582cb5/volumes" Oct 13 22:12:06 crc kubenswrapper[4689]: I1013 22:12:06.967463 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/util/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.179123 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/pull/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.189314 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/util/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.210099 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/pull/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.366193 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/util/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.376420 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/pull/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.399882 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/extract/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.552364 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-utilities/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.676795 4689 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-utilities/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.705082 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-content/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.724187 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-content/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.879143 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-utilities/0.log" Oct 13 22:12:07 crc kubenswrapper[4689]: I1013 22:12:07.887990 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-content/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.067478 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-utilities/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.319103 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-utilities/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.360934 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/registry-server/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.367068 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-content/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.396410 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-content/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.541148 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-utilities/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.547792 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-content/0.log" Oct 13 22:12:08 crc kubenswrapper[4689]: I1013 22:12:08.745613 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/util/0.log" Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.012524 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/util/0.log" Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.043885 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/pull/0.log" Oct 13 22:12:09 crc 
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.073332 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/pull/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.193193 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/registry-server/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.200072 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/util/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.236288 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/pull/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.305427 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/extract/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.381065 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-c55dv_6bdfaec3-47bd-4ca1-98f5-a5af88e0d075/marketplace-operator/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.500433 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-utilities/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.651781 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-content/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.672793 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-content/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.675165 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-utilities/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.840547 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-content/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.841010 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-utilities/0.log"
Oct 13 22:12:09 crc kubenswrapper[4689]: I1013 22:12:09.979220 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/registry-server/0.log"
Oct 13 22:12:10 crc kubenswrapper[4689]: I1013 22:12:10.040100 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-utilities/0.log"
path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-content/0.log" Oct 13 22:12:10 crc kubenswrapper[4689]: I1013 22:12:10.201479 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-content/0.log" Oct 13 22:12:10 crc kubenswrapper[4689]: I1013 22:12:10.207895 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-utilities/0.log" Oct 13 22:12:10 crc kubenswrapper[4689]: I1013 22:12:10.373091 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-content/0.log" Oct 13 22:12:10 crc kubenswrapper[4689]: I1013 22:12:10.387814 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-utilities/0.log" Oct 13 22:12:10 crc kubenswrapper[4689]: I1013 22:12:10.834156 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/registry-server/0.log" Oct 13 22:13:44 crc kubenswrapper[4689]: I1013 22:13:44.899076 4689 generic.go:334] "Generic (PLEG): container finished" podID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerID="150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0" exitCode=0 Oct 13 22:13:44 crc kubenswrapper[4689]: I1013 22:13:44.899180 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-47qhw/must-gather-rqm5w" event={"ID":"ee11f646-3a19-441a-abe8-86f5147df7d1","Type":"ContainerDied","Data":"150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0"} Oct 13 22:13:44 crc kubenswrapper[4689]: I1013 22:13:44.900350 4689 scope.go:117] "RemoveContainer" containerID="150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0" Oct 13 22:13:45 crc kubenswrapper[4689]: I1013 22:13:45.346275 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-47qhw_must-gather-rqm5w_ee11f646-3a19-441a-abe8-86f5147df7d1/gather/0.log" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.109225 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-47qhw/must-gather-rqm5w"] Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.110020 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-47qhw/must-gather-rqm5w" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerName="copy" containerID="cri-o://ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2" gracePeriod=2 Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.119050 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-47qhw/must-gather-rqm5w"] Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.527902 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-47qhw_must-gather-rqm5w_ee11f646-3a19-441a-abe8-86f5147df7d1/copy/0.log" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.528316 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-47qhw/must-gather-rqm5w" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.636957 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ee11f646-3a19-441a-abe8-86f5147df7d1-must-gather-output\") pod \"ee11f646-3a19-441a-abe8-86f5147df7d1\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.637349 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlcmm\" (UniqueName: \"kubernetes.io/projected/ee11f646-3a19-441a-abe8-86f5147df7d1-kube-api-access-vlcmm\") pod \"ee11f646-3a19-441a-abe8-86f5147df7d1\" (UID: \"ee11f646-3a19-441a-abe8-86f5147df7d1\") " Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.645417 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee11f646-3a19-441a-abe8-86f5147df7d1-kube-api-access-vlcmm" (OuterVolumeSpecName: "kube-api-access-vlcmm") pod "ee11f646-3a19-441a-abe8-86f5147df7d1" (UID: "ee11f646-3a19-441a-abe8-86f5147df7d1"). InnerVolumeSpecName "kube-api-access-vlcmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.739305 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlcmm\" (UniqueName: \"kubernetes.io/projected/ee11f646-3a19-441a-abe8-86f5147df7d1-kube-api-access-vlcmm\") on node \"crc\" DevicePath \"\"" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.767155 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee11f646-3a19-441a-abe8-86f5147df7d1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ee11f646-3a19-441a-abe8-86f5147df7d1" (UID: "ee11f646-3a19-441a-abe8-86f5147df7d1"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.841186 4689 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ee11f646-3a19-441a-abe8-86f5147df7d1-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.982257 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-47qhw_must-gather-rqm5w_ee11f646-3a19-441a-abe8-86f5147df7d1/copy/0.log" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.982651 4689 generic.go:334] "Generic (PLEG): container finished" podID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerID="ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2" exitCode=143 Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.982695 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-47qhw/must-gather-rqm5w" Oct 13 22:13:54 crc kubenswrapper[4689]: I1013 22:13:54.982712 4689 scope.go:117] "RemoveContainer" containerID="ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2" Oct 13 22:13:55 crc kubenswrapper[4689]: I1013 22:13:55.014709 4689 scope.go:117] "RemoveContainer" containerID="150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0" Oct 13 22:13:55 crc kubenswrapper[4689]: I1013 22:13:55.064028 4689 scope.go:117] "RemoveContainer" containerID="ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2" Oct 13 22:13:55 crc kubenswrapper[4689]: E1013 22:13:55.064523 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2\": container with ID starting with ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2 not found: ID does not exist" containerID="ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2" Oct 13 22:13:55 crc kubenswrapper[4689]: I1013 22:13:55.064574 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2"} err="failed to get container status \"ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2\": rpc error: code = NotFound desc = could not find container \"ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2\": container with ID starting with ea84b92017234166be1c839b38aa4ba8b1a6524b826b77e49ba9288f38ab29b2 not found: ID does not exist" Oct 13 22:13:55 crc kubenswrapper[4689]: I1013 22:13:55.064678 4689 scope.go:117] "RemoveContainer" containerID="150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0" Oct 13 22:13:55 crc kubenswrapper[4689]: E1013 22:13:55.065081 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0\": container with ID starting with 150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0 not found: ID does not exist" containerID="150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0" Oct 13 22:13:55 crc kubenswrapper[4689]: I1013 22:13:55.065127 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0"} err="failed to get container status \"150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0\": rpc error: code = NotFound desc = could not find container \"150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0\": container with ID starting with 150c9f3006da464b4ad794aa19bb0e37ac71bb64ee4fa3ee3b79888d742214e0 not found: ID does not exist" Oct 13 22:13:55 crc kubenswrapper[4689]: I1013 22:13:55.879039 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" path="/var/lib/kubelet/pods/ee11f646-3a19-441a-abe8-86f5147df7d1/volumes" Oct 13 22:14:23 crc kubenswrapper[4689]: I1013 22:14:23.858804 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:14:23 crc kubenswrapper[4689]: 
Oct 13 22:14:23 crc kubenswrapper[4689]: I1013 22:14:23.859381 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.890452 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x8sqc"]
Oct 13 22:14:31 crc kubenswrapper[4689]: E1013 22:14:31.891422 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="registry-server"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891437 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="registry-server"
Oct 13 22:14:31 crc kubenswrapper[4689]: E1013 22:14:31.891451 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerName="copy"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891460 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerName="copy"
Oct 13 22:14:31 crc kubenswrapper[4689]: E1013 22:14:31.891473 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="extract-content"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891483 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="extract-content"
Oct 13 22:14:31 crc kubenswrapper[4689]: E1013 22:14:31.891500 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="extract-utilities"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891508 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="extract-utilities"
Oct 13 22:14:31 crc kubenswrapper[4689]: E1013 22:14:31.891528 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerName="gather"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891535 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerName="gather"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891787 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cfe7e8e-582d-4e20-ae6a-74a359582cb5" containerName="registry-server"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891811 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerName="copy"
Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.891823 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee11f646-3a19-441a-abe8-86f5147df7d1" containerName="gather"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:31 crc kubenswrapper[4689]: I1013 22:14:31.917411 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8sqc"] Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.046190 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jw82\" (UniqueName: \"kubernetes.io/projected/e2d76565-b62e-49f5-b3a9-df42a4f55862-kube-api-access-9jw82\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.046623 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-utilities\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.046727 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-catalog-content\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.148166 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jw82\" (UniqueName: \"kubernetes.io/projected/e2d76565-b62e-49f5-b3a9-df42a4f55862-kube-api-access-9jw82\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.148261 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-utilities\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.148347 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-catalog-content\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.148757 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-utilities\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.148780 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-catalog-content\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.174970 4689 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9jw82\" (UniqueName: \"kubernetes.io/projected/e2d76565-b62e-49f5-b3a9-df42a4f55862-kube-api-access-9jw82\") pod \"redhat-marketplace-x8sqc\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.246918 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:32 crc kubenswrapper[4689]: I1013 22:14:32.763311 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8sqc"] Oct 13 22:14:33 crc kubenswrapper[4689]: I1013 22:14:33.356158 4689 generic.go:334] "Generic (PLEG): container finished" podID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerID="a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967" exitCode=0 Oct 13 22:14:33 crc kubenswrapper[4689]: I1013 22:14:33.356233 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8sqc" event={"ID":"e2d76565-b62e-49f5-b3a9-df42a4f55862","Type":"ContainerDied","Data":"a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967"} Oct 13 22:14:33 crc kubenswrapper[4689]: I1013 22:14:33.356422 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8sqc" event={"ID":"e2d76565-b62e-49f5-b3a9-df42a4f55862","Type":"ContainerStarted","Data":"2b80770495b2cbeaaebeb9de1e24b12fbfcb17159074ade902799cb1f9b674ea"} Oct 13 22:14:33 crc kubenswrapper[4689]: I1013 22:14:33.358863 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 22:14:34 crc kubenswrapper[4689]: I1013 22:14:34.367512 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8sqc" event={"ID":"e2d76565-b62e-49f5-b3a9-df42a4f55862","Type":"ContainerStarted","Data":"3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a"} Oct 13 22:14:35 crc kubenswrapper[4689]: I1013 22:14:35.379950 4689 generic.go:334] "Generic (PLEG): container finished" podID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerID="3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a" exitCode=0 Oct 13 22:14:35 crc kubenswrapper[4689]: I1013 22:14:35.380233 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8sqc" event={"ID":"e2d76565-b62e-49f5-b3a9-df42a4f55862","Type":"ContainerDied","Data":"3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a"} Oct 13 22:14:36 crc kubenswrapper[4689]: I1013 22:14:36.392788 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8sqc" event={"ID":"e2d76565-b62e-49f5-b3a9-df42a4f55862","Type":"ContainerStarted","Data":"cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a"} Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.248064 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.248793 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.307267 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 
22:14:42.329803 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x8sqc" podStartSLOduration=8.807249062 podStartE2EDuration="11.329779665s" podCreationTimestamp="2025-10-13 22:14:31 +0000 UTC" firstStartedPulling="2025-10-13 22:14:33.35841276 +0000 UTC m=+3790.276657875" lastFinishedPulling="2025-10-13 22:14:35.880943383 +0000 UTC m=+3792.799188478" observedRunningTime="2025-10-13 22:14:36.415163171 +0000 UTC m=+3793.333408256" watchObservedRunningTime="2025-10-13 22:14:42.329779665 +0000 UTC m=+3799.248024760" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.507101 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.558114 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8sqc"] Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.676324 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whcsp/must-gather-rwg5q"] Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.678247 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.687934 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-whcsp"/"openshift-service-ca.crt" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.688042 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-whcsp"/"kube-root-ca.crt" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.690038 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-whcsp/must-gather-rwg5q"] Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.744495 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhfdp\" (UniqueName: \"kubernetes.io/projected/77ab2cd8-390f-45e9-bf66-d147582588b3-kube-api-access-xhfdp\") pod \"must-gather-rwg5q\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.744650 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/77ab2cd8-390f-45e9-bf66-d147582588b3-must-gather-output\") pod \"must-gather-rwg5q\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.845988 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhfdp\" (UniqueName: \"kubernetes.io/projected/77ab2cd8-390f-45e9-bf66-d147582588b3-kube-api-access-xhfdp\") pod \"must-gather-rwg5q\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.846133 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/77ab2cd8-390f-45e9-bf66-d147582588b3-must-gather-output\") pod \"must-gather-rwg5q\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.846697 4689 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/77ab2cd8-390f-45e9-bf66-d147582588b3-must-gather-output\") pod \"must-gather-rwg5q\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.865626 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhfdp\" (UniqueName: \"kubernetes.io/projected/77ab2cd8-390f-45e9-bf66-d147582588b3-kube-api-access-xhfdp\") pod \"must-gather-rwg5q\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:42 crc kubenswrapper[4689]: I1013 22:14:42.999435 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:14:43 crc kubenswrapper[4689]: I1013 22:14:43.451636 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-whcsp/must-gather-rwg5q"] Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.472639 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x8sqc" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="registry-server" containerID="cri-o://cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a" gracePeriod=2 Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.473043 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/must-gather-rwg5q" event={"ID":"77ab2cd8-390f-45e9-bf66-d147582588b3","Type":"ContainerStarted","Data":"7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db"} Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.473073 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/must-gather-rwg5q" event={"ID":"77ab2cd8-390f-45e9-bf66-d147582588b3","Type":"ContainerStarted","Data":"c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d"} Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.473086 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/must-gather-rwg5q" event={"ID":"77ab2cd8-390f-45e9-bf66-d147582588b3","Type":"ContainerStarted","Data":"cb455ab54b4bd9f27352fbc8b2f34cbbea9e96c66be359dcf01f85b64016f8ad"} Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.506053 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-whcsp/must-gather-rwg5q" podStartSLOduration=2.5060306089999997 podStartE2EDuration="2.506030609s" podCreationTimestamp="2025-10-13 22:14:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 22:14:44.498380366 +0000 UTC m=+3801.416625471" watchObservedRunningTime="2025-10-13 22:14:44.506030609 +0000 UTC m=+3801.424275694" Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.921194 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.992769 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-utilities\") pod \"e2d76565-b62e-49f5-b3a9-df42a4f55862\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.992902 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-catalog-content\") pod \"e2d76565-b62e-49f5-b3a9-df42a4f55862\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.992935 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jw82\" (UniqueName: \"kubernetes.io/projected/e2d76565-b62e-49f5-b3a9-df42a4f55862-kube-api-access-9jw82\") pod \"e2d76565-b62e-49f5-b3a9-df42a4f55862\" (UID: \"e2d76565-b62e-49f5-b3a9-df42a4f55862\") " Oct 13 22:14:44 crc kubenswrapper[4689]: I1013 22:14:44.994513 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-utilities" (OuterVolumeSpecName: "utilities") pod "e2d76565-b62e-49f5-b3a9-df42a4f55862" (UID: "e2d76565-b62e-49f5-b3a9-df42a4f55862"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:44.999914 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2d76565-b62e-49f5-b3a9-df42a4f55862-kube-api-access-9jw82" (OuterVolumeSpecName: "kube-api-access-9jw82") pod "e2d76565-b62e-49f5-b3a9-df42a4f55862" (UID: "e2d76565-b62e-49f5-b3a9-df42a4f55862"). InnerVolumeSpecName "kube-api-access-9jw82". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.022239 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2d76565-b62e-49f5-b3a9-df42a4f55862" (UID: "e2d76565-b62e-49f5-b3a9-df42a4f55862"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.094973 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.095013 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2d76565-b62e-49f5-b3a9-df42a4f55862-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.095027 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jw82\" (UniqueName: \"kubernetes.io/projected/e2d76565-b62e-49f5-b3a9-df42a4f55862-kube-api-access-9jw82\") on node \"crc\" DevicePath \"\"" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.483371 4689 generic.go:334] "Generic (PLEG): container finished" podID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerID="cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a" exitCode=0 Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.483462 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8sqc" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.483468 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8sqc" event={"ID":"e2d76565-b62e-49f5-b3a9-df42a4f55862","Type":"ContainerDied","Data":"cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a"} Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.483534 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8sqc" event={"ID":"e2d76565-b62e-49f5-b3a9-df42a4f55862","Type":"ContainerDied","Data":"2b80770495b2cbeaaebeb9de1e24b12fbfcb17159074ade902799cb1f9b674ea"} Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.483565 4689 scope.go:117] "RemoveContainer" containerID="cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.504228 4689 scope.go:117] "RemoveContainer" containerID="3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.516161 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8sqc"] Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.525031 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8sqc"] Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.545986 4689 scope.go:117] "RemoveContainer" containerID="a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.568159 4689 scope.go:117] "RemoveContainer" containerID="cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a" Oct 13 22:14:45 crc kubenswrapper[4689]: E1013 22:14:45.568736 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a\": container with ID starting with cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a not found: ID does not exist" containerID="cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.568782 4689 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a"} err="failed to get container status \"cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a\": rpc error: code = NotFound desc = could not find container \"cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a\": container with ID starting with cf587c950ec1dc2786a8284f5f5a76a657dd7528ee95a3dfbbf3d4e669cb574a not found: ID does not exist" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.568812 4689 scope.go:117] "RemoveContainer" containerID="3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a" Oct 13 22:14:45 crc kubenswrapper[4689]: E1013 22:14:45.569087 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a\": container with ID starting with 3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a not found: ID does not exist" containerID="3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.569110 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a"} err="failed to get container status \"3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a\": rpc error: code = NotFound desc = could not find container \"3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a\": container with ID starting with 3885706be684955fb85d901f7b0a566b45cc506ac7cf7ae01a6b4641b6e8ea1a not found: ID does not exist" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.569124 4689 scope.go:117] "RemoveContainer" containerID="a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967" Oct 13 22:14:45 crc kubenswrapper[4689]: E1013 22:14:45.569340 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967\": container with ID starting with a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967 not found: ID does not exist" containerID="a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.569370 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967"} err="failed to get container status \"a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967\": rpc error: code = NotFound desc = could not find container \"a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967\": container with ID starting with a7f235ad8107c4ca58943b6d3c7b30dc2041787e5ae53ca8bdf7f3b95879e967 not found: ID does not exist" Oct 13 22:14:45 crc kubenswrapper[4689]: I1013 22:14:45.881884 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" path="/var/lib/kubelet/pods/e2d76565-b62e-49f5-b3a9-df42a4f55862/volumes" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.144253 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whcsp/crc-debug-97g4f"] Oct 13 22:14:47 crc kubenswrapper[4689]: E1013 22:14:47.145157 4689 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="registry-server" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.145175 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="registry-server" Oct 13 22:14:47 crc kubenswrapper[4689]: E1013 22:14:47.145216 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="extract-content" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.145225 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="extract-content" Oct 13 22:14:47 crc kubenswrapper[4689]: E1013 22:14:47.145246 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="extract-utilities" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.145254 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="extract-utilities" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.145436 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2d76565-b62e-49f5-b3a9-df42a4f55862" containerName="registry-server" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.146064 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.148031 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-whcsp"/"default-dockercfg-k5wx7" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.231268 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-host\") pod \"crc-debug-97g4f\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.231439 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9xq2\" (UniqueName: \"kubernetes.io/projected/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-kube-api-access-s9xq2\") pod \"crc-debug-97g4f\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.333179 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-host\") pod \"crc-debug-97g4f\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.333242 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9xq2\" (UniqueName: \"kubernetes.io/projected/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-kube-api-access-s9xq2\") pod \"crc-debug-97g4f\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.333322 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-host\") pod \"crc-debug-97g4f\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 
13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.356531 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9xq2\" (UniqueName: \"kubernetes.io/projected/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-kube-api-access-s9xq2\") pod \"crc-debug-97g4f\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:14:47 crc kubenswrapper[4689]: I1013 22:14:47.469604 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:14:47 crc kubenswrapper[4689]: W1013 22:14:47.500965 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3a6efda_e19f_4c73_b6fa_06ce53955ce1.slice/crio-2e37f12060ff2549005243b7d068a27fc7da725f4bfac86a6f31a147cb9d7dcd WatchSource:0}: Error finding container 2e37f12060ff2549005243b7d068a27fc7da725f4bfac86a6f31a147cb9d7dcd: Status 404 returned error can't find the container with id 2e37f12060ff2549005243b7d068a27fc7da725f4bfac86a6f31a147cb9d7dcd Oct 13 22:14:48 crc kubenswrapper[4689]: I1013 22:14:48.509022 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/crc-debug-97g4f" event={"ID":"f3a6efda-e19f-4c73-b6fa-06ce53955ce1","Type":"ContainerStarted","Data":"6c894be0be60f336c136cff9f739ef7e2a63b2cc7260e459ba77244312571dcb"} Oct 13 22:14:48 crc kubenswrapper[4689]: I1013 22:14:48.509649 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/crc-debug-97g4f" event={"ID":"f3a6efda-e19f-4c73-b6fa-06ce53955ce1","Type":"ContainerStarted","Data":"2e37f12060ff2549005243b7d068a27fc7da725f4bfac86a6f31a147cb9d7dcd"} Oct 13 22:14:48 crc kubenswrapper[4689]: I1013 22:14:48.528802 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-whcsp/crc-debug-97g4f" podStartSLOduration=1.5287827680000001 podStartE2EDuration="1.528782768s" podCreationTimestamp="2025-10-13 22:14:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-13 22:14:48.524554498 +0000 UTC m=+3805.442799573" watchObservedRunningTime="2025-10-13 22:14:48.528782768 +0000 UTC m=+3805.447027853" Oct 13 22:14:53 crc kubenswrapper[4689]: I1013 22:14:53.859401 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:14:53 crc kubenswrapper[4689]: I1013 22:14:53.859966 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.143946 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz"] Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.145843 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.149037 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.160888 4689 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.176738 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp6sm\" (UniqueName: \"kubernetes.io/projected/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-kube-api-access-hp6sm\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.177270 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-config-volume\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.177363 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-secret-volume\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.182038 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz"] Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.278930 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-config-volume\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.279017 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-secret-volume\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.279082 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp6sm\" (UniqueName: \"kubernetes.io/projected/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-kube-api-access-hp6sm\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.279861 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-config-volume\") pod 
\"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.284744 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-secret-volume\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.306739 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp6sm\" (UniqueName: \"kubernetes.io/projected/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-kube-api-access-hp6sm\") pod \"collect-profiles-29339895-g26qz\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.485869 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:00 crc kubenswrapper[4689]: I1013 22:15:00.947236 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz"] Oct 13 22:15:01 crc kubenswrapper[4689]: I1013 22:15:01.632903 4689 generic.go:334] "Generic (PLEG): container finished" podID="9443b6d5-9ae9-43e7-a98c-c54d5ce26123" containerID="b4d149732c544c1e38a29a760785d6c1537a67a763e26d466bab111030c01bed" exitCode=0 Oct 13 22:15:01 crc kubenswrapper[4689]: I1013 22:15:01.633027 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" event={"ID":"9443b6d5-9ae9-43e7-a98c-c54d5ce26123","Type":"ContainerDied","Data":"b4d149732c544c1e38a29a760785d6c1537a67a763e26d466bab111030c01bed"} Oct 13 22:15:01 crc kubenswrapper[4689]: I1013 22:15:01.633415 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" event={"ID":"9443b6d5-9ae9-43e7-a98c-c54d5ce26123","Type":"ContainerStarted","Data":"283ad8033307febf124727cddd8f651a405bc2babda46a4031dc1ea9df563357"} Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:02.972333 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.131381 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-config-volume\") pod \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.131550 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp6sm\" (UniqueName: \"kubernetes.io/projected/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-kube-api-access-hp6sm\") pod \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.131669 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-secret-volume\") pod \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\" (UID: \"9443b6d5-9ae9-43e7-a98c-c54d5ce26123\") " Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.132343 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-config-volume" (OuterVolumeSpecName: "config-volume") pod "9443b6d5-9ae9-43e7-a98c-c54d5ce26123" (UID: "9443b6d5-9ae9-43e7-a98c-c54d5ce26123"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.133095 4689 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-config-volume\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.138057 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-kube-api-access-hp6sm" (OuterVolumeSpecName: "kube-api-access-hp6sm") pod "9443b6d5-9ae9-43e7-a98c-c54d5ce26123" (UID: "9443b6d5-9ae9-43e7-a98c-c54d5ce26123"). InnerVolumeSpecName "kube-api-access-hp6sm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.140802 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9443b6d5-9ae9-43e7-a98c-c54d5ce26123" (UID: "9443b6d5-9ae9-43e7-a98c-c54d5ce26123"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.235078 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp6sm\" (UniqueName: \"kubernetes.io/projected/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-kube-api-access-hp6sm\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.235105 4689 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9443b6d5-9ae9-43e7-a98c-c54d5ce26123-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.653533 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" event={"ID":"9443b6d5-9ae9-43e7-a98c-c54d5ce26123","Type":"ContainerDied","Data":"283ad8033307febf124727cddd8f651a405bc2babda46a4031dc1ea9df563357"} Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.653866 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="283ad8033307febf124727cddd8f651a405bc2babda46a4031dc1ea9df563357" Oct 13 22:15:03 crc kubenswrapper[4689]: I1013 22:15:03.653701 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29339895-g26qz" Oct 13 22:15:04 crc kubenswrapper[4689]: I1013 22:15:04.041479 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"] Oct 13 22:15:04 crc kubenswrapper[4689]: I1013 22:15:04.049015 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29339850-jcjzf"] Oct 13 22:15:05 crc kubenswrapper[4689]: I1013 22:15:05.877625 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30464507-4f21-4110-8238-9698b62b8fe3" path="/var/lib/kubelet/pods/30464507-4f21-4110-8238-9698b62b8fe3/volumes" Oct 13 22:15:20 crc kubenswrapper[4689]: I1013 22:15:20.794493 4689 generic.go:334] "Generic (PLEG): container finished" podID="f3a6efda-e19f-4c73-b6fa-06ce53955ce1" containerID="6c894be0be60f336c136cff9f739ef7e2a63b2cc7260e459ba77244312571dcb" exitCode=0 Oct 13 22:15:20 crc kubenswrapper[4689]: I1013 22:15:20.794574 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/crc-debug-97g4f" event={"ID":"f3a6efda-e19f-4c73-b6fa-06ce53955ce1","Type":"ContainerDied","Data":"6c894be0be60f336c136cff9f739ef7e2a63b2cc7260e459ba77244312571dcb"} Oct 13 22:15:21 crc kubenswrapper[4689]: I1013 22:15:21.899628 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:15:21 crc kubenswrapper[4689]: I1013 22:15:21.933880 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whcsp/crc-debug-97g4f"] Oct 13 22:15:21 crc kubenswrapper[4689]: I1013 22:15:21.941286 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whcsp/crc-debug-97g4f"] Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.088820 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-host\") pod \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.088937 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-host" (OuterVolumeSpecName: "host") pod "f3a6efda-e19f-4c73-b6fa-06ce53955ce1" (UID: "f3a6efda-e19f-4c73-b6fa-06ce53955ce1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.089055 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9xq2\" (UniqueName: \"kubernetes.io/projected/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-kube-api-access-s9xq2\") pod \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\" (UID: \"f3a6efda-e19f-4c73-b6fa-06ce53955ce1\") " Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.089389 4689 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-host\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.094205 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-kube-api-access-s9xq2" (OuterVolumeSpecName: "kube-api-access-s9xq2") pod "f3a6efda-e19f-4c73-b6fa-06ce53955ce1" (UID: "f3a6efda-e19f-4c73-b6fa-06ce53955ce1"). InnerVolumeSpecName "kube-api-access-s9xq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.191108 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9xq2\" (UniqueName: \"kubernetes.io/projected/f3a6efda-e19f-4c73-b6fa-06ce53955ce1-kube-api-access-s9xq2\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.814435 4689 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e37f12060ff2549005243b7d068a27fc7da725f4bfac86a6f31a147cb9d7dcd" Oct 13 22:15:22 crc kubenswrapper[4689]: I1013 22:15:22.814511 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-97g4f" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.106126 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whcsp/crc-debug-bflqv"] Oct 13 22:15:23 crc kubenswrapper[4689]: E1013 22:15:23.107028 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3a6efda-e19f-4c73-b6fa-06ce53955ce1" containerName="container-00" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.107049 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3a6efda-e19f-4c73-b6fa-06ce53955ce1" containerName="container-00" Oct 13 22:15:23 crc kubenswrapper[4689]: E1013 22:15:23.107098 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9443b6d5-9ae9-43e7-a98c-c54d5ce26123" containerName="collect-profiles" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.107107 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="9443b6d5-9ae9-43e7-a98c-c54d5ce26123" containerName="collect-profiles" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.107399 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3a6efda-e19f-4c73-b6fa-06ce53955ce1" containerName="container-00" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.107444 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="9443b6d5-9ae9-43e7-a98c-c54d5ce26123" containerName="collect-profiles" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.108333 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.110307 4689 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-whcsp"/"default-dockercfg-k5wx7" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.211238 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txgxr\" (UniqueName: \"kubernetes.io/projected/8244e583-5f1c-47bb-a2d9-c587742d7f85-kube-api-access-txgxr\") pod \"crc-debug-bflqv\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.211623 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8244e583-5f1c-47bb-a2d9-c587742d7f85-host\") pod \"crc-debug-bflqv\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.314405 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txgxr\" (UniqueName: \"kubernetes.io/projected/8244e583-5f1c-47bb-a2d9-c587742d7f85-kube-api-access-txgxr\") pod \"crc-debug-bflqv\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.314563 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8244e583-5f1c-47bb-a2d9-c587742d7f85-host\") pod \"crc-debug-bflqv\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.314708 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/8244e583-5f1c-47bb-a2d9-c587742d7f85-host\") pod \"crc-debug-bflqv\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.338514 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txgxr\" (UniqueName: \"kubernetes.io/projected/8244e583-5f1c-47bb-a2d9-c587742d7f85-kube-api-access-txgxr\") pod \"crc-debug-bflqv\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.424871 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.829476 4689 generic.go:334] "Generic (PLEG): container finished" podID="8244e583-5f1c-47bb-a2d9-c587742d7f85" containerID="937ccf9b795d1e15b51e8c2bf15e727c60354ba57d9b053532a8c97a30aae0ca" exitCode=0 Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.829526 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/crc-debug-bflqv" event={"ID":"8244e583-5f1c-47bb-a2d9-c587742d7f85","Type":"ContainerDied","Data":"937ccf9b795d1e15b51e8c2bf15e727c60354ba57d9b053532a8c97a30aae0ca"} Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.829556 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/crc-debug-bflqv" event={"ID":"8244e583-5f1c-47bb-a2d9-c587742d7f85","Type":"ContainerStarted","Data":"641fce9b00f160a3786945cc89eedcd16df2f34880a98e66b573dfbb92d9113a"} Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.858771 4689 patch_prober.go:28] interesting pod/machine-config-daemon-w5fqm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.858820 4689 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.858861 4689 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.860538 4689 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d"} pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.860630 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" containerName="machine-config-daemon" containerID="cri-o://b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" gracePeriod=600 Oct 13 22:15:23 crc kubenswrapper[4689]: I1013 22:15:23.883082 4689 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="f3a6efda-e19f-4c73-b6fa-06ce53955ce1" path="/var/lib/kubelet/pods/f3a6efda-e19f-4c73-b6fa-06ce53955ce1/volumes" Oct 13 22:15:24 crc kubenswrapper[4689]: E1013 22:15:24.010268 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:15:24 crc kubenswrapper[4689]: E1013 22:15:24.214891 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:15:24 crc kubenswrapper[4689]: I1013 22:15:24.288643 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whcsp/crc-debug-bflqv"] Oct 13 22:15:24 crc kubenswrapper[4689]: I1013 22:15:24.295239 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whcsp/crc-debug-bflqv"] Oct 13 22:15:24 crc kubenswrapper[4689]: I1013 22:15:24.848814 4689 generic.go:334] "Generic (PLEG): container finished" podID="1863da92-265f-451e-a741-a184c8d3f781" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" exitCode=0 Oct 13 22:15:24 crc kubenswrapper[4689]: I1013 22:15:24.849181 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerDied","Data":"b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d"} Oct 13 22:15:24 crc kubenswrapper[4689]: I1013 22:15:24.849229 4689 scope.go:117] "RemoveContainer" containerID="8308c97505954cfd53e34046f29d414baee75c943899bf62fc757653698c4248" Oct 13 22:15:24 crc kubenswrapper[4689]: I1013 22:15:24.853343 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:15:24 crc kubenswrapper[4689]: E1013 22:15:24.855693 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:15:24 crc kubenswrapper[4689]: I1013 22:15:24.948098 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.054725 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8244e583-5f1c-47bb-a2d9-c587742d7f85-host\") pod \"8244e583-5f1c-47bb-a2d9-c587742d7f85\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.055041 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txgxr\" (UniqueName: \"kubernetes.io/projected/8244e583-5f1c-47bb-a2d9-c587742d7f85-kube-api-access-txgxr\") pod \"8244e583-5f1c-47bb-a2d9-c587742d7f85\" (UID: \"8244e583-5f1c-47bb-a2d9-c587742d7f85\") " Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.055223 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8244e583-5f1c-47bb-a2d9-c587742d7f85-host" (OuterVolumeSpecName: "host") pod "8244e583-5f1c-47bb-a2d9-c587742d7f85" (UID: "8244e583-5f1c-47bb-a2d9-c587742d7f85"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.055740 4689 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8244e583-5f1c-47bb-a2d9-c587742d7f85-host\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.061844 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8244e583-5f1c-47bb-a2d9-c587742d7f85-kube-api-access-txgxr" (OuterVolumeSpecName: "kube-api-access-txgxr") pod "8244e583-5f1c-47bb-a2d9-c587742d7f85" (UID: "8244e583-5f1c-47bb-a2d9-c587742d7f85"). InnerVolumeSpecName "kube-api-access-txgxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.157354 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txgxr\" (UniqueName: \"kubernetes.io/projected/8244e583-5f1c-47bb-a2d9-c587742d7f85-kube-api-access-txgxr\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.481049 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whcsp/crc-debug-5ftc2"] Oct 13 22:15:25 crc kubenswrapper[4689]: E1013 22:15:25.481455 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8244e583-5f1c-47bb-a2d9-c587742d7f85" containerName="container-00" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.481467 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="8244e583-5f1c-47bb-a2d9-c587742d7f85" containerName="container-00" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.481791 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="8244e583-5f1c-47bb-a2d9-c587742d7f85" containerName="container-00" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.482751 4689 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.666368 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6efa7046-25d8-420f-8650-7ef375427162-host\") pod \"crc-debug-5ftc2\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.666459 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtdj9\" (UniqueName: \"kubernetes.io/projected/6efa7046-25d8-420f-8650-7ef375427162-kube-api-access-qtdj9\") pod \"crc-debug-5ftc2\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.768638 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6efa7046-25d8-420f-8650-7ef375427162-host\") pod \"crc-debug-5ftc2\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.768703 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtdj9\" (UniqueName: \"kubernetes.io/projected/6efa7046-25d8-420f-8650-7ef375427162-kube-api-access-qtdj9\") pod \"crc-debug-5ftc2\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.768794 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6efa7046-25d8-420f-8650-7ef375427162-host\") pod \"crc-debug-5ftc2\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.787403 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtdj9\" (UniqueName: \"kubernetes.io/projected/6efa7046-25d8-420f-8650-7ef375427162-kube-api-access-qtdj9\") pod \"crc-debug-5ftc2\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.802023 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:25 crc kubenswrapper[4689]: W1013 22:15:25.841949 4689 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6efa7046_25d8_420f_8650_7ef375427162.slice/crio-456e285497ca03892f448d70d389d305c4e81e97e038ff9597f5d4d9cf80ae2d WatchSource:0}: Error finding container 456e285497ca03892f448d70d389d305c4e81e97e038ff9597f5d4d9cf80ae2d: Status 404 returned error can't find the container with id 456e285497ca03892f448d70d389d305c4e81e97e038ff9597f5d4d9cf80ae2d Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.876321 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.896472 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8244e583-5f1c-47bb-a2d9-c587742d7f85" path="/var/lib/kubelet/pods/8244e583-5f1c-47bb-a2d9-c587742d7f85/volumes" Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.897205 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/crc-debug-5ftc2" event={"ID":"6efa7046-25d8-420f-8650-7ef375427162","Type":"ContainerStarted","Data":"456e285497ca03892f448d70d389d305c4e81e97e038ff9597f5d4d9cf80ae2d"} Oct 13 22:15:25 crc kubenswrapper[4689]: I1013 22:15:25.897256 4689 scope.go:117] "RemoveContainer" containerID="937ccf9b795d1e15b51e8c2bf15e727c60354ba57d9b053532a8c97a30aae0ca" Oct 13 22:15:26 crc kubenswrapper[4689]: I1013 22:15:26.906879 4689 generic.go:334] "Generic (PLEG): container finished" podID="6efa7046-25d8-420f-8650-7ef375427162" containerID="975f0a035fa288a2e7819b1665f4b079e91ffc1fe3f6032ef6ae4fc243895e6c" exitCode=0 Oct 13 22:15:26 crc kubenswrapper[4689]: I1013 22:15:26.906938 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/crc-debug-5ftc2" event={"ID":"6efa7046-25d8-420f-8650-7ef375427162","Type":"ContainerDied","Data":"975f0a035fa288a2e7819b1665f4b079e91ffc1fe3f6032ef6ae4fc243895e6c"} Oct 13 22:15:26 crc kubenswrapper[4689]: I1013 22:15:26.943793 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whcsp/crc-debug-5ftc2"] Oct 13 22:15:26 crc kubenswrapper[4689]: I1013 22:15:26.951418 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whcsp/crc-debug-5ftc2"] Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.013039 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.210758 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6efa7046-25d8-420f-8650-7ef375427162-host\") pod \"6efa7046-25d8-420f-8650-7ef375427162\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.210899 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtdj9\" (UniqueName: \"kubernetes.io/projected/6efa7046-25d8-420f-8650-7ef375427162-kube-api-access-qtdj9\") pod \"6efa7046-25d8-420f-8650-7ef375427162\" (UID: \"6efa7046-25d8-420f-8650-7ef375427162\") " Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.211068 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6efa7046-25d8-420f-8650-7ef375427162-host" (OuterVolumeSpecName: "host") pod "6efa7046-25d8-420f-8650-7ef375427162" (UID: "6efa7046-25d8-420f-8650-7ef375427162"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.211315 4689 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6efa7046-25d8-420f-8650-7ef375427162-host\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.216386 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6efa7046-25d8-420f-8650-7ef375427162-kube-api-access-qtdj9" (OuterVolumeSpecName: "kube-api-access-qtdj9") pod "6efa7046-25d8-420f-8650-7ef375427162" (UID: "6efa7046-25d8-420f-8650-7ef375427162"). InnerVolumeSpecName "kube-api-access-qtdj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.313185 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtdj9\" (UniqueName: \"kubernetes.io/projected/6efa7046-25d8-420f-8650-7ef375427162-kube-api-access-qtdj9\") on node \"crc\" DevicePath \"\"" Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.926522 4689 scope.go:117] "RemoveContainer" containerID="975f0a035fa288a2e7819b1665f4b079e91ffc1fe3f6032ef6ae4fc243895e6c" Oct 13 22:15:28 crc kubenswrapper[4689]: I1013 22:15:28.926542 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-5ftc2" Oct 13 22:15:29 crc kubenswrapper[4689]: I1013 22:15:29.745349 4689 scope.go:117] "RemoveContainer" containerID="5f8a2c628ff4e194a52ad839c7c45757039e1d6d34184a9a944fe93208f8afe3" Oct 13 22:15:29 crc kubenswrapper[4689]: I1013 22:15:29.879361 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6efa7046-25d8-420f-8650-7ef375427162" path="/var/lib/kubelet/pods/6efa7046-25d8-420f-8650-7ef375427162/volumes" Oct 13 22:15:34 crc kubenswrapper[4689]: E1013 22:15:34.478216 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:15:35 crc kubenswrapper[4689]: I1013 22:15:35.867084 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:15:35 crc kubenswrapper[4689]: E1013 22:15:35.867606 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.041277 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6545bbd774-g576b_839e3e00-f791-4fb8-8df0-677c8e9a0c27/barbican-api/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.216370 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-api-6545bbd774-g576b_839e3e00-f791-4fb8-8df0-677c8e9a0c27/barbican-api-log/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.270971 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7bb46f45d-mgdw4_94bcd39d-bca7-4d51-9327-aec08e22b60a/barbican-keystone-listener/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.329794 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7bb46f45d-mgdw4_94bcd39d-bca7-4d51-9327-aec08e22b60a/barbican-keystone-listener-log/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.443282 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f6bb4646c-c8vt4_be6e9b94-6d9a-46ae-ae15-5d9516e4ee47/barbican-worker/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.448812 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5f6bb4646c-c8vt4_be6e9b94-6d9a-46ae-ae15-5d9516e4ee47/barbican-worker-log/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.648972 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-wlvlk_1355811c-482d-4b45-b7cb-7e16b64debf6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.800175 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/ceilometer-notification-agent/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.835121 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/ceilometer-central-agent/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.855106 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/proxy-httpd/0.log" Oct 13 22:15:41 crc kubenswrapper[4689]: I1013 22:15:41.909650 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_08fb7ee0-eee9-49f6-82e2-007abc19bd3b/sg-core/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.068287 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e47a212-9a97-447e-97d9-2686a2937a05/cinder-api-log/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.073829 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2e47a212-9a97-447e-97d9-2686a2937a05/cinder-api/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.316905 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d79575d-670f-47b0-83a6-9c2b36f8ffd0/cinder-scheduler/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.335744 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0d79575d-670f-47b0-83a6-9c2b36f8ffd0/probe/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.342371 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-cltfg_a8d4f189-4446-410c-8cfd-b1cf669221db/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.498995 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-56hq2_c1cb5a31-9872-40a5-acb9-6755720fe782/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.507220 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-4b87s_9d6b52af-e31e-464d-a83b-ce21d37da265/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.678908 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-vrcxh_0c646c73-577a-42ba-8aa7-39bac477cb15/init/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.851194 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-vrcxh_0c646c73-577a-42ba-8aa7-39bac477cb15/init/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.915572 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-vrcxh_0c646c73-577a-42ba-8aa7-39bac477cb15/dnsmasq-dns/0.log" Oct 13 22:15:42 crc kubenswrapper[4689]: I1013 22:15:42.943428 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-9dztk_f9b26af4-3dba-452d-9d66-715747d10f18/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.091627 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2a6e5dd8-c0bc-49ec-b03a-9971dbd85486/glance-httpd/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.111222 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_2a6e5dd8-c0bc-49ec-b03a-9971dbd85486/glance-log/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.246625 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47/glance-httpd/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.294759 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_9735f3f4-b1c0-49c3-80d8-b6cbbc5eea47/glance-log/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.439060 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-56b8966ffb-99krc_d46a395d-e4aa-45cb-85a7-86a43d5d7371/horizon/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.655220 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-48qv8_af61fcd9-0c85-418a-8329-0a0dc4236f35/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.761937 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-pdv7h_194d986e-a55b-472d-880a-789fe09fcac0/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:43 crc kubenswrapper[4689]: I1013 22:15:43.820030 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-56b8966ffb-99krc_d46a395d-e4aa-45cb-85a7-86a43d5d7371/horizon-log/0.log" Oct 13 22:15:44 crc kubenswrapper[4689]: I1013 22:15:44.050702 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6f78847c76-48zjm_bf43c63a-d1f7-492b-9345-d271dd62a7d2/keystone-api/0.log" Oct 13 22:15:44 crc 
Oct 13 22:15:44 crc kubenswrapper[4689]: I1013 22:15:44.185740 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_0c2195ac-449d-47d0-9a1b-b512a0c6b44e/kube-state-metrics/0.log"
Oct 13 22:15:44 crc kubenswrapper[4689]: I1013 22:15:44.284725 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-mxt8c_5c27fced-a27b-4b4f-bc40-cdcb566eb633/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 22:15:44 crc kubenswrapper[4689]: I1013 22:15:44.687846 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9b98684c9-9h5ml_4d52e532-8731-4838-9e3b-e316a722a0a6/neutron-api/0.log"
Oct 13 22:15:44 crc kubenswrapper[4689]: I1013 22:15:44.705328 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9b98684c9-9h5ml_4d52e532-8731-4838-9e3b-e316a722a0a6/neutron-httpd/0.log"
Oct 13 22:15:44 crc kubenswrapper[4689]: E1013 22:15:44.724874 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache]"
Oct 13 22:15:44 crc kubenswrapper[4689]: I1013 22:15:44.897823 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-dfxvz_d7480298-e4a8-4010-a526-9ca1dba08f71/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 22:15:45 crc kubenswrapper[4689]: I1013 22:15:45.443939 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4625bed2-2e08-4399-a0a8-fcb62b4239bc/nova-api-log/0.log"
Oct 13 22:15:45 crc kubenswrapper[4689]: I1013 22:15:45.453511 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_3d4d915e-c42b-4389-bd38-49fc12bc950a/nova-cell0-conductor-conductor/0.log"
Oct 13 22:15:45 crc kubenswrapper[4689]: I1013 22:15:45.700848 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_573c1817-260c-43b0-a892-f393e2d4ba07/nova-cell1-conductor-conductor/0.log"
Oct 13 22:15:45 crc kubenswrapper[4689]: I1013 22:15:45.769088 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4625bed2-2e08-4399-a0a8-fcb62b4239bc/nova-api-api/0.log"
Oct 13 22:15:45 crc kubenswrapper[4689]: I1013 22:15:45.790538 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_7b0b0a0a-623c-42db-abb2-50a50d924793/nova-cell1-novncproxy-novncproxy/0.log"
Oct 13 22:15:46 crc kubenswrapper[4689]: I1013 22:15:46.004836 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-jqvm9_88053993-c10c-49d4-b69a-82c745001999/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 22:15:46 crc kubenswrapper[4689]: I1013 22:15:46.152603 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_dd6f475e-e9b6-421d-9897-1b5a8a748a2a/nova-metadata-log/0.log"
Oct 13 22:15:46 crc kubenswrapper[4689]: I1013 22:15:46.452052 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_06de4aaa-8949-449e-bb2f-65f4cffa4954/nova-scheduler-scheduler/0.log"
Oct 13 22:15:46 crc kubenswrapper[4689]: I1013 22:15:46.470705 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4fc44e1c-da65-48c1-ad48-8b41c9bf4391/mysql-bootstrap/0.log"
Oct 13 22:15:46 crc kubenswrapper[4689]: I1013 22:15:46.651778 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4fc44e1c-da65-48c1-ad48-8b41c9bf4391/galera/0.log"
Oct 13 22:15:46 crc kubenswrapper[4689]: I1013 22:15:46.696994 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4fc44e1c-da65-48c1-ad48-8b41c9bf4391/mysql-bootstrap/0.log"
Oct 13 22:15:46 crc kubenswrapper[4689]: I1013 22:15:46.846533 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_241dd2d8-e2a2-4653-bfc9-24255216fad4/mysql-bootstrap/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.027673 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_241dd2d8-e2a2-4653-bfc9-24255216fad4/galera/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.044554 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_241dd2d8-e2a2-4653-bfc9-24255216fad4/mysql-bootstrap/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.224118 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_10f0cb83-9cb9-48d1-8b9e-2217c48790d9/openstackclient/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.326133 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8t9jt_596fffc8-5b10-4da9-950c-ac58fafd2eb2/ovn-controller/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.480730 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-x69m8_56a091d6-e531-4956-b5aa-15f43a9c1038/openstack-network-exporter/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.552124 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_dd6f475e-e9b6-421d-9897-1b5a8a748a2a/nova-metadata-metadata/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.655299 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovsdb-server-init/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.853567 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovsdb-server-init/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.869673 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovs-vswitchd/0.log"
Oct 13 22:15:47 crc kubenswrapper[4689]: I1013 22:15:47.932062 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-6fdqj_d43ea53c-112c-44ee-a9dd-d359de34d88b/ovsdb-server/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.055910 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-vjk7j_0185d029-cb9b-4438-a72a-6616759e267e/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.173776 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3b50077e-96c6-4908-b3bd-5efa65b83fff/openstack-network-exporter/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.178894 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_3b50077e-96c6-4908-b3bd-5efa65b83fff/ovn-northd/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.330928 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8/openstack-network-exporter/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.351460 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_dbd6cbc7-b558-4e7f-b9c5-1d1588eb99a8/ovsdbserver-nb/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.544092 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d13e0078-efa1-47aa-86f7-c7e19e2283af/openstack-network-exporter/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.563653 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_d13e0078-efa1-47aa-86f7-c7e19e2283af/ovsdbserver-sb/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.778020 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d89fff484-q9fvk_3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5/placement-api/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.821703 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4448f3de-e179-4a5c-8a6d-dd16b725bb0c/setup-container/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.866712 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d89fff484-q9fvk_3d9d3da5-b0bc-4d3c-ad4f-0e8120d74be5/placement-log/0.log"
Oct 13 22:15:48 crc kubenswrapper[4689]: I1013 22:15:48.866903 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d"
Oct 13 22:15:48 crc kubenswrapper[4689]: E1013 22:15:48.867193 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.106089 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4448f3de-e179-4a5c-8a6d-dd16b725bb0c/setup-container/0.log"
Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.133622 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_de9fccf5-fe48-498b-a6db-15e734aa9e61/setup-container/0.log"
Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.192351 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_4448f3de-e179-4a5c-8a6d-dd16b725bb0c/rabbitmq/0.log"
Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.353063 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_de9fccf5-fe48-498b-a6db-15e734aa9e61/setup-container/0.log"
file" path="/var/log/pods/openstack_rabbitmq-server-0_de9fccf5-fe48-498b-a6db-15e734aa9e61/setup-container/0.log" Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.407143 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_de9fccf5-fe48-498b-a6db-15e734aa9e61/rabbitmq/0.log" Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.437027 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-5qrnv_024b2226-0636-4d0c-8225-53b2e5ad7050/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.637244 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-65d9m_e6f26597-49c3-41a5-8352-cef0d439fd5c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.689491 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-lkz6q_44571d1c-f8f4-442a-ac47-51d05df37bfc/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.825252 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-wdbxk_5d79295b-e957-48d5-b56e-d84c50ca7250/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:49 crc kubenswrapper[4689]: I1013 22:15:49.994250 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-dnqdr_1d1c238b-38b4-471f-a55b-706b93036367/ssh-known-hosts-edpm-deployment/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.130741 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f54846cc7-sc4qr_bae630fb-d96c-45df-abb1-d7913a06d4e6/proxy-server/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.259709 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f54846cc7-sc4qr_bae630fb-d96c-45df-abb1-d7913a06d4e6/proxy-httpd/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.304534 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-8rq2n_0817e909-9d71-4ddd-b3e7-49e41383b1da/swift-ring-rebalance/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.443374 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-auditor/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.512495 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-reaper/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.586408 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-replicator/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.629979 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/account-server/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.644029 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-auditor/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.785871 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-server/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.788081 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-replicator/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.845832 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/container-updater/0.log" Oct 13 22:15:50 crc kubenswrapper[4689]: I1013 22:15:50.880748 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-auditor/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.022882 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-server/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.029875 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-replicator/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.042295 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-expirer/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.083070 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/object-updater/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.196428 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/rsync/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.271452 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d67ea903-1ba1-4116-a39d-b2ca0d5d7eb5/swift-recon-cron/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.303795 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-v8s47_a253b97c-0119-461e-bf69-7dfe5bb90e7f/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.517608 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2fd6769f-1acf-441d-8569-13baec5fcf72/tempest-tests-tempest-tests-runner/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.536202 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_618c5ade-6473-4bb8-88b8-f92932517f5e/test-operator-logs-container/0.log" Oct 13 22:15:51 crc kubenswrapper[4689]: I1013 22:15:51.735161 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-2hh5n_aed5fbf9-103b-48fb-b982-61a445ff7f09/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 13 22:15:55 crc kubenswrapper[4689]: E1013 22:15:55.048553 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:15:55 crc kubenswrapper[4689]: I1013 22:15:55.977348 4689 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod8244e583-5f1c-47bb-a2d9-c587742d7f85"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod8244e583-5f1c-47bb-a2d9-c587742d7f85] : Timed out while waiting for systemd to remove kubepods-besteffort-pod8244e583_5f1c_47bb_a2d9_c587742d7f85.slice" Oct 13 22:15:55 crc kubenswrapper[4689]: E1013 22:15:55.977690 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod8244e583-5f1c-47bb-a2d9-c587742d7f85] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod8244e583-5f1c-47bb-a2d9-c587742d7f85] : Timed out while waiting for systemd to remove kubepods-besteffort-pod8244e583_5f1c_47bb_a2d9_c587742d7f85.slice" pod="openshift-must-gather-whcsp/crc-debug-bflqv" podUID="8244e583-5f1c-47bb-a2d9-c587742d7f85" Oct 13 22:15:56 crc kubenswrapper[4689]: I1013 22:15:56.139333 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/crc-debug-bflqv" Oct 13 22:16:01 crc kubenswrapper[4689]: I1013 22:16:01.354141 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_83675f56-8efb-4eb1-b6e5-65dde48c3ee4/memcached/0.log" Oct 13 22:16:03 crc kubenswrapper[4689]: I1013 22:16:03.873660 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:16:03 crc kubenswrapper[4689]: E1013 22:16:03.874152 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:16:05 crc kubenswrapper[4689]: E1013 22:16:05.288524 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:16:13 crc kubenswrapper[4689]: I1013 22:16:13.892472 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/util/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.100290 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/util/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.133781 4689 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/pull/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.147587 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/pull/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.286002 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/util/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.286369 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/extract/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.294959 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_669aa84d0d224047c7ed556f06af34522e8007d37531e711d9fcaca0c4p6thd_d1a40226-990d-4a34-b499-91ee14c3da86/pull/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.462623 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-pvsmw_5f5620d8-6856-4b27-b74a-208edc1ec0d7/kube-rbac-proxy/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.485114 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-pvsmw_5f5620d8-6856-4b27-b74a-208edc1ec0d7/manager/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.524356 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-w28f4_574e8237-5b30-4af8-b93f-449d9ec98793/kube-rbac-proxy/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.677538 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ltx5l_3bca3670-4880-4598-abbd-8ed51e351c5a/kube-rbac-proxy/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.681168 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-w28f4_574e8237-5b30-4af8-b93f-449d9ec98793/manager/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.718206 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ltx5l_3bca3670-4880-4598-abbd-8ed51e351c5a/manager/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.847466 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-zm9b4_2ed371e4-bae8-4320-9b6b-e28103137aee/kube-rbac-proxy/0.log" Oct 13 22:16:14 crc kubenswrapper[4689]: I1013 22:16:14.922669 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-zm9b4_2ed371e4-bae8-4320-9b6b-e28103137aee/manager/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.038837 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-8lb6r_980922e5-08ec-418a-b207-f463195cc6da/kube-rbac-proxy/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: 
I1013 22:16:15.109314 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-8lb6r_980922e5-08ec-418a-b207-f463195cc6da/manager/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.128553 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-gg4tl_97a56885-e550-415b-95be-3f61e0ac38e5/kube-rbac-proxy/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.248904 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-gg4tl_97a56885-e550-415b-95be-3f61e0ac38e5/manager/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.289646 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-v8wsd_ff2d1098-a378-4314-8662-1dfb98c56aae/kube-rbac-proxy/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.466983 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-v8wsd_ff2d1098-a378-4314-8662-1dfb98c56aae/manager/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.499303 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-mrv92_4e3b3f49-bb44-4375-9bab-527a5e0e57a5/kube-rbac-proxy/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.533538 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-mrv92_4e3b3f49-bb44-4375-9bab-527a5e0e57a5/manager/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: E1013 22:16:15.580143 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-conmon-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1863da92_265f_451e_a741_a184c8d3f781.slice/crio-b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.662101 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-7zs79_de26ce24-8f8f-42e6-bd80-5331eb11f6b1/kube-rbac-proxy/0.log" Oct 13 22:16:15 crc kubenswrapper[4689]: I1013 22:16:15.746001 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-7zs79_de26ce24-8f8f-42e6-bd80-5331eb11f6b1/manager/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.031033 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v664m_f3d70a25-802f-4d17-a250-3b76584ff7dc/kube-rbac-proxy/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.065965 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v664m_f3d70a25-802f-4d17-a250-3b76584ff7dc/manager/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.119553 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-qjpss_87d04908-37f4-42ab-8328-893b4e255767/kube-rbac-proxy/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.218327 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-qjpss_87d04908-37f4-42ab-8328-893b4e255767/manager/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.263617 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-jlgcf_5dc35208-04aa-4df8-af17-6ce8ad80199f/kube-rbac-proxy/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.367032 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-jlgcf_5dc35208-04aa-4df8-af17-6ce8ad80199f/manager/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.464072 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-xpqj5_86e5e806-711e-4a41-9c65-0b121d0228e6/kube-rbac-proxy/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.533664 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-xpqj5_86e5e806-711e-4a41-9c65-0b121d0228e6/manager/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.611910 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-kd8d5_bc1916e6-51d4-4ca9-b8a2-8be1659426a2/kube-rbac-proxy/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.670370 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-kd8d5_bc1916e6-51d4-4ca9-b8a2-8be1659426a2/manager/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.780304 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7_599721f1-ec3e-4a83-b769-db5440b2f260/kube-rbac-proxy/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.781130 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dh8cf7_599721f1-ec3e-4a83-b769-db5440b2f260/manager/0.log" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.867529 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:16:16 crc kubenswrapper[4689]: E1013 22:16:16.867796 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:16:16 crc kubenswrapper[4689]: I1013 22:16:16.909365 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fcd588594-tnfjj_654be83b-acf2-4c39-b753-8f4cc7750052/kube-rbac-proxy/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.107957 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5555666847-lgpdl_6a94802f-3575-410f-8d65-f1d11165a10e/kube-rbac-proxy/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.271544 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5555666847-lgpdl_6a94802f-3575-410f-8d65-f1d11165a10e/operator/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.339132 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-wv7t5_c2f0840b-8103-4f7a-8698-3fd60e779a59/registry-server/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.581527 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-f2kxw_d34fac28-ebdd-4c77-ad9d-995611ee01d4/kube-rbac-proxy/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.620655 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-f2kxw_d34fac28-ebdd-4c77-ad9d-995611ee01d4/manager/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.783742 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-j6wsf_261f1cfd-d8a7-4dea-baa7-3feb8f67813a/kube-rbac-proxy/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.859325 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-j6wsf_261f1cfd-d8a7-4dea-baa7-3feb8f67813a/manager/0.log" Oct 13 22:16:17 crc kubenswrapper[4689]: I1013 22:16:17.981298 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-tn6wl_04a373fa-1962-4bdc-8e26-53d557df6be3/operator/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.033925 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-2g8g7_3c9cfbce-22ae-4c0d-9b73-513bf285b4a0/kube-rbac-proxy/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.064664 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fcd588594-tnfjj_654be83b-acf2-4c39-b753-8f4cc7750052/manager/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.101924 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-2g8g7_3c9cfbce-22ae-4c0d-9b73-513bf285b4a0/manager/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.220603 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-n52xn_6c0d5f43-6334-41be-bb4f-9d538d40004a/kube-rbac-proxy/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.286770 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-n52xn_6c0d5f43-6334-41be-bb4f-9d538d40004a/manager/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.296959 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-fcfzv_d9a167f4-4f3c-44d9-9e18-7fdf79273d12/kube-rbac-proxy/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.363109 4689 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-fcfzv_d9a167f4-4f3c-44d9-9e18-7fdf79273d12/manager/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.443918 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-t5zxf_61a8f77a-a34b-4e04-b508-fc0fb8e7ede7/manager/0.log" Oct 13 22:16:18 crc kubenswrapper[4689]: I1013 22:16:18.447695 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-t5zxf_61a8f77a-a34b-4e04-b508-fc0fb8e7ede7/kube-rbac-proxy/0.log" Oct 13 22:16:31 crc kubenswrapper[4689]: I1013 22:16:31.867276 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:16:31 crc kubenswrapper[4689]: E1013 22:16:31.868028 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:16:32 crc kubenswrapper[4689]: I1013 22:16:32.531814 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-nmkts_1c2ff4c0-e878-4984-a12a-1b4f8cb3bc54/control-plane-machine-set-operator/0.log" Oct 13 22:16:32 crc kubenswrapper[4689]: I1013 22:16:32.725108 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fdwnr_202442b7-241e-44ee-b24f-0eac63864890/machine-api-operator/0.log" Oct 13 22:16:32 crc kubenswrapper[4689]: I1013 22:16:32.729968 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fdwnr_202442b7-241e-44ee-b24f-0eac63864890/kube-rbac-proxy/0.log" Oct 13 22:16:43 crc kubenswrapper[4689]: I1013 22:16:43.866925 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:16:43 crc kubenswrapper[4689]: I1013 22:16:43.867543 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-g6vgs_8e5b1294-2785-4023-857f-e404eaed07fb/cert-manager-controller/0.log" Oct 13 22:16:43 crc kubenswrapper[4689]: E1013 22:16:43.867551 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:16:44 crc kubenswrapper[4689]: I1013 22:16:44.063574 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-52xlv_693e1ab8-5677-4bdd-bb02-b8540de9513a/cert-manager-webhook/0.log" Oct 13 22:16:44 crc kubenswrapper[4689]: I1013 22:16:44.077435 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-ndlw8_afb8356c-40a3-4270-bbe2-644b8b14482f/cert-manager-cainjector/0.log" Oct 13 22:16:55 crc 
kubenswrapper[4689]: I1013 22:16:55.533796 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-47vrf_15bec576-7113-4f6f-8f5a-ed95b3e01608/nmstate-console-plugin/0.log" Oct 13 22:16:55 crc kubenswrapper[4689]: I1013 22:16:55.759447 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-9nfcm_63209f73-d6db-4f5e-9863-37c7d8555f1d/nmstate-handler/0.log" Oct 13 22:16:55 crc kubenswrapper[4689]: I1013 22:16:55.881457 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-tgngb_a76e7989-6bed-472b-8a4f-53227f485adb/kube-rbac-proxy/0.log" Oct 13 22:16:55 crc kubenswrapper[4689]: I1013 22:16:55.890161 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-tgngb_a76e7989-6bed-472b-8a4f-53227f485adb/nmstate-metrics/0.log" Oct 13 22:16:56 crc kubenswrapper[4689]: I1013 22:16:56.006703 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-bmmp2_189fae32-9490-4991-b5c0-2ba0de67d337/nmstate-operator/0.log" Oct 13 22:16:56 crc kubenswrapper[4689]: I1013 22:16:56.115237 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-v7tfb_2f1f259b-9d4c-469c-b336-0f7c4fdac5be/nmstate-webhook/0.log" Oct 13 22:16:58 crc kubenswrapper[4689]: I1013 22:16:58.870146 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:16:58 crc kubenswrapper[4689]: E1013 22:16:58.870932 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:17:10 crc kubenswrapper[4689]: I1013 22:17:10.369146 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fzzmz_692201d0-1473-499e-b9e6-2d35e6c72032/kube-rbac-proxy/0.log" Oct 13 22:17:10 crc kubenswrapper[4689]: I1013 22:17:10.461499 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fzzmz_692201d0-1473-499e-b9e6-2d35e6c72032/controller/0.log" Oct 13 22:17:10 crc kubenswrapper[4689]: I1013 22:17:10.582997 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:17:10 crc kubenswrapper[4689]: I1013 22:17:10.736404 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:17:10 crc kubenswrapper[4689]: I1013 22:17:10.769156 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:17:10 crc kubenswrapper[4689]: I1013 22:17:10.791967 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:17:10 crc kubenswrapper[4689]: I1013 22:17:10.792799 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.003953 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.005795 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.022450 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.077206 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.185029 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-frr-files/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.189146 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-reloader/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.213350 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/cp-metrics/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.287034 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/controller/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.396257 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/kube-rbac-proxy/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.396839 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/frr-metrics/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.476461 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/kube-rbac-proxy-frr/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.662869 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/reloader/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.714517 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-bqf92_13456adb-0ae6-4db3-a924-dabf915a24aa/frr-k8s-webhook-server/0.log" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.867328 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:17:11 crc kubenswrapper[4689]: E1013 22:17:11.867641 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" 
podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:17:11 crc kubenswrapper[4689]: I1013 22:17:11.889618 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5dd59d54d9-fw8tw_61c6188b-33cc-42e9-b2b2-d67a8fa2f9b1/manager/0.log" Oct 13 22:17:12 crc kubenswrapper[4689]: I1013 22:17:12.068077 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5776bf7669-g6lbn_58b9bc04-cebe-4c96-9fdc-14fd4a71f45e/webhook-server/0.log" Oct 13 22:17:12 crc kubenswrapper[4689]: I1013 22:17:12.239023 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z2p9k_77872557-bf06-47e5-b7cb-0101ddd79f56/kube-rbac-proxy/0.log" Oct 13 22:17:12 crc kubenswrapper[4689]: I1013 22:17:12.665982 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z2p9k_77872557-bf06-47e5-b7cb-0101ddd79f56/speaker/0.log" Oct 13 22:17:12 crc kubenswrapper[4689]: I1013 22:17:12.791291 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7d7t6_bb5b52f5-2cd3-4945-8242-96deb1549036/frr/0.log" Oct 13 22:17:25 crc kubenswrapper[4689]: I1013 22:17:25.494176 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/util/0.log" Oct 13 22:17:25 crc kubenswrapper[4689]: I1013 22:17:25.867439 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:17:25 crc kubenswrapper[4689]: E1013 22:17:25.868107 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:17:25 crc kubenswrapper[4689]: I1013 22:17:25.987531 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/util/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.029950 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/pull/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.056022 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/pull/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.170085 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/util/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.191463 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/pull/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.224980 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2ckb94_8adf85c6-59ab-4e3f-8830-9d7509cb34b4/extract/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.376899 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-utilities/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.531951 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-content/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.535250 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-content/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.536834 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-utilities/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.708840 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-utilities/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.748667 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/extract-content/0.log" Oct 13 22:17:26 crc kubenswrapper[4689]: I1013 22:17:26.983547 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-utilities/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.160162 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-utilities/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.214556 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5bqtl_7f15d479-e732-4ffb-bb8a-c51ce29ffb85/registry-server/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.229680 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-content/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.250026 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-content/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.377198 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-content/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.390158 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/extract-utilities/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.644805 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/util/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.844934 4689 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/pull/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.859711 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/util/0.log" Oct 13 22:17:27 crc kubenswrapper[4689]: I1013 22:17:27.871806 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/pull/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.035886 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5rfc2_22bd82e7-c0ff-418c-8aec-3d373e40bac3/registry-server/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.092705 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/extract/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.110928 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/util/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.127221 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c8s4jm_d77292a7-4976-429d-a389-525109ea00b2/pull/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.292814 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-c55dv_6bdfaec3-47bd-4ca1-98f5-a5af88e0d075/marketplace-operator/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.325813 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-utilities/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.494813 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-content/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.500302 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-utilities/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.541013 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-content/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.673646 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-utilities/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.684757 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/extract-content/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.829716 4689 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-6br62_1b9207c1-e871-4ab7-b030-03664c9e6af4/registry-server/0.log" Oct 13 22:17:28 crc kubenswrapper[4689]: I1013 22:17:28.862296 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-utilities/0.log" Oct 13 22:17:29 crc kubenswrapper[4689]: I1013 22:17:29.056323 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-utilities/0.log" Oct 13 22:17:29 crc kubenswrapper[4689]: I1013 22:17:29.070458 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-content/0.log" Oct 13 22:17:29 crc kubenswrapper[4689]: I1013 22:17:29.079775 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-content/0.log" Oct 13 22:17:29 crc kubenswrapper[4689]: I1013 22:17:29.252657 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-content/0.log" Oct 13 22:17:29 crc kubenswrapper[4689]: I1013 22:17:29.258219 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/extract-utilities/0.log" Oct 13 22:17:29 crc kubenswrapper[4689]: I1013 22:17:29.701866 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-mfrg8_d1307a0a-b3fe-421a-bcdb-b390cb59638d/registry-server/0.log" Oct 13 22:17:40 crc kubenswrapper[4689]: I1013 22:17:40.867749 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:17:40 crc kubenswrapper[4689]: E1013 22:17:40.868471 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:17:55 crc kubenswrapper[4689]: I1013 22:17:55.867095 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:17:55 crc kubenswrapper[4689]: E1013 22:17:55.867824 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:18:10 crc kubenswrapper[4689]: I1013 22:18:10.867178 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:18:10 crc kubenswrapper[4689]: E1013 22:18:10.867937 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.357084 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z67pn"] Oct 13 22:18:17 crc kubenswrapper[4689]: E1013 22:18:17.359398 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6efa7046-25d8-420f-8650-7ef375427162" containerName="container-00" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.359439 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="6efa7046-25d8-420f-8650-7ef375427162" containerName="container-00" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.359688 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="6efa7046-25d8-420f-8650-7ef375427162" containerName="container-00" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.361294 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.380434 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z67pn"] Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.491710 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgt55\" (UniqueName: \"kubernetes.io/projected/f9fa48e7-4200-481f-92b2-b6450c25cd23-kube-api-access-cgt55\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.491775 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-utilities\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.491955 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-catalog-content\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.593709 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgt55\" (UniqueName: \"kubernetes.io/projected/f9fa48e7-4200-481f-92b2-b6450c25cd23-kube-api-access-cgt55\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.593757 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-utilities\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.593829 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-catalog-content\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.594355 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-catalog-content\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.594811 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-utilities\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.616425 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgt55\" (UniqueName: \"kubernetes.io/projected/f9fa48e7-4200-481f-92b2-b6450c25cd23-kube-api-access-cgt55\") pod \"community-operators-z67pn\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:17 crc kubenswrapper[4689]: I1013 22:18:17.689557 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:18 crc kubenswrapper[4689]: I1013 22:18:18.203808 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z67pn"] Oct 13 22:18:18 crc kubenswrapper[4689]: I1013 22:18:18.465089 4689 generic.go:334] "Generic (PLEG): container finished" podID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerID="2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538" exitCode=0 Oct 13 22:18:18 crc kubenswrapper[4689]: I1013 22:18:18.465387 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z67pn" event={"ID":"f9fa48e7-4200-481f-92b2-b6450c25cd23","Type":"ContainerDied","Data":"2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538"} Oct 13 22:18:18 crc kubenswrapper[4689]: I1013 22:18:18.465416 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z67pn" event={"ID":"f9fa48e7-4200-481f-92b2-b6450c25cd23","Type":"ContainerStarted","Data":"dac616806afd7310567dc2f8a2de58d354d05e780bb013a3d7e85226e09f8529"} Oct 13 22:18:18 crc kubenswrapper[4689]: E1013 22:18:18.539260 4689 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9fa48e7_4200_481f_92b2_b6450c25cd23.slice/crio-conmon-2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538.scope\": RecentStats: unable to find data in memory cache]" Oct 13 22:18:20 crc kubenswrapper[4689]: I1013 22:18:20.482927 4689 generic.go:334] "Generic (PLEG): container finished" podID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerID="417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38" exitCode=0 Oct 13 22:18:20 crc kubenswrapper[4689]: I1013 22:18:20.483007 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z67pn" 
event={"ID":"f9fa48e7-4200-481f-92b2-b6450c25cd23","Type":"ContainerDied","Data":"417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38"} Oct 13 22:18:21 crc kubenswrapper[4689]: I1013 22:18:21.498272 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z67pn" event={"ID":"f9fa48e7-4200-481f-92b2-b6450c25cd23","Type":"ContainerStarted","Data":"c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e"} Oct 13 22:18:21 crc kubenswrapper[4689]: I1013 22:18:21.528208 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z67pn" podStartSLOduration=2.056678483 podStartE2EDuration="4.528187803s" podCreationTimestamp="2025-10-13 22:18:17 +0000 UTC" firstStartedPulling="2025-10-13 22:18:18.466946757 +0000 UTC m=+4015.385191842" lastFinishedPulling="2025-10-13 22:18:20.938456067 +0000 UTC m=+4017.856701162" observedRunningTime="2025-10-13 22:18:21.523488512 +0000 UTC m=+4018.441733607" watchObservedRunningTime="2025-10-13 22:18:21.528187803 +0000 UTC m=+4018.446432888" Oct 13 22:18:25 crc kubenswrapper[4689]: I1013 22:18:25.868008 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:18:25 crc kubenswrapper[4689]: E1013 22:18:25.868783 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:18:27 crc kubenswrapper[4689]: I1013 22:18:27.690754 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:27 crc kubenswrapper[4689]: I1013 22:18:27.691076 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:27 crc kubenswrapper[4689]: I1013 22:18:27.739756 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:29 crc kubenswrapper[4689]: I1013 22:18:29.032352 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:29 crc kubenswrapper[4689]: I1013 22:18:29.080349 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z67pn"] Oct 13 22:18:30 crc kubenswrapper[4689]: I1013 22:18:30.569483 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z67pn" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerName="registry-server" containerID="cri-o://c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e" gracePeriod=2 Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.021026 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.172876 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-catalog-content\") pod \"f9fa48e7-4200-481f-92b2-b6450c25cd23\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.173082 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-utilities\") pod \"f9fa48e7-4200-481f-92b2-b6450c25cd23\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.173123 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgt55\" (UniqueName: \"kubernetes.io/projected/f9fa48e7-4200-481f-92b2-b6450c25cd23-kube-api-access-cgt55\") pod \"f9fa48e7-4200-481f-92b2-b6450c25cd23\" (UID: \"f9fa48e7-4200-481f-92b2-b6450c25cd23\") " Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.173791 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-utilities" (OuterVolumeSpecName: "utilities") pod "f9fa48e7-4200-481f-92b2-b6450c25cd23" (UID: "f9fa48e7-4200-481f-92b2-b6450c25cd23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.180978 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9fa48e7-4200-481f-92b2-b6450c25cd23-kube-api-access-cgt55" (OuterVolumeSpecName: "kube-api-access-cgt55") pod "f9fa48e7-4200-481f-92b2-b6450c25cd23" (UID: "f9fa48e7-4200-481f-92b2-b6450c25cd23"). InnerVolumeSpecName "kube-api-access-cgt55". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.220318 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9fa48e7-4200-481f-92b2-b6450c25cd23" (UID: "f9fa48e7-4200-481f-92b2-b6450c25cd23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.275512 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.275554 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgt55\" (UniqueName: \"kubernetes.io/projected/f9fa48e7-4200-481f-92b2-b6450c25cd23-kube-api-access-cgt55\") on node \"crc\" DevicePath \"\"" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.275568 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9fa48e7-4200-481f-92b2-b6450c25cd23-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.578370 4689 generic.go:334] "Generic (PLEG): container finished" podID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerID="c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e" exitCode=0 Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.578417 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z67pn" event={"ID":"f9fa48e7-4200-481f-92b2-b6450c25cd23","Type":"ContainerDied","Data":"c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e"} Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.578442 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z67pn" event={"ID":"f9fa48e7-4200-481f-92b2-b6450c25cd23","Type":"ContainerDied","Data":"dac616806afd7310567dc2f8a2de58d354d05e780bb013a3d7e85226e09f8529"} Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.578458 4689 scope.go:117] "RemoveContainer" containerID="c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.578464 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z67pn" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.598681 4689 scope.go:117] "RemoveContainer" containerID="417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.642797 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z67pn"] Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.647276 4689 scope.go:117] "RemoveContainer" containerID="2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.654779 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z67pn"] Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.682233 4689 scope.go:117] "RemoveContainer" containerID="c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e" Oct 13 22:18:31 crc kubenswrapper[4689]: E1013 22:18:31.682726 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e\": container with ID starting with c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e not found: ID does not exist" containerID="c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.682757 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e"} err="failed to get container status \"c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e\": rpc error: code = NotFound desc = could not find container \"c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e\": container with ID starting with c96e04ba03f6b60b12a377f1c105ef4c058acbfc94e6e99c91195190aad4906e not found: ID does not exist" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.682781 4689 scope.go:117] "RemoveContainer" containerID="417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38" Oct 13 22:18:31 crc kubenswrapper[4689]: E1013 22:18:31.683144 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38\": container with ID starting with 417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38 not found: ID does not exist" containerID="417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.683201 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38"} err="failed to get container status \"417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38\": rpc error: code = NotFound desc = could not find container \"417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38\": container with ID starting with 417db25338aeb6da6d757e18977db7e9b0cdd26b392fb00fcbabf32b33634c38 not found: ID does not exist" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.683230 4689 scope.go:117] "RemoveContainer" containerID="2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538" Oct 13 22:18:31 crc kubenswrapper[4689]: E1013 22:18:31.683584 4689 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538\": container with ID starting with 2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538 not found: ID does not exist" containerID="2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.683670 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538"} err="failed to get container status \"2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538\": rpc error: code = NotFound desc = could not find container \"2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538\": container with ID starting with 2a9ae82976c4ce95f4717111037bad63c0b8e5292b16ce47c7ff1aae42634538 not found: ID does not exist" Oct 13 22:18:31 crc kubenswrapper[4689]: I1013 22:18:31.894004 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" path="/var/lib/kubelet/pods/f9fa48e7-4200-481f-92b2-b6450c25cd23/volumes" Oct 13 22:18:39 crc kubenswrapper[4689]: I1013 22:18:39.867978 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:18:39 crc kubenswrapper[4689]: E1013 22:18:39.868848 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:18:54 crc kubenswrapper[4689]: I1013 22:18:54.866988 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:18:54 crc kubenswrapper[4689]: E1013 22:18:54.867807 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:19:05 crc kubenswrapper[4689]: I1013 22:19:05.867863 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:19:05 crc kubenswrapper[4689]: E1013 22:19:05.868776 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:19:05 crc kubenswrapper[4689]: I1013 22:19:05.897339 4689 generic.go:334] "Generic (PLEG): container finished" podID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerID="c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d" exitCode=0 Oct 13 22:19:05 crc kubenswrapper[4689]: I1013 22:19:05.897454 4689 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whcsp/must-gather-rwg5q" event={"ID":"77ab2cd8-390f-45e9-bf66-d147582588b3","Type":"ContainerDied","Data":"c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d"} Oct 13 22:19:05 crc kubenswrapper[4689]: I1013 22:19:05.898065 4689 scope.go:117] "RemoveContainer" containerID="c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d" Oct 13 22:19:06 crc kubenswrapper[4689]: I1013 22:19:06.231883 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-whcsp_must-gather-rwg5q_77ab2cd8-390f-45e9-bf66-d147582588b3/gather/0.log" Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.138235 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whcsp/must-gather-rwg5q"] Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.139002 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-whcsp/must-gather-rwg5q" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerName="copy" containerID="cri-o://7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db" gracePeriod=2 Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.152256 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whcsp/must-gather-rwg5q"] Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.543025 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-whcsp_must-gather-rwg5q_77ab2cd8-390f-45e9-bf66-d147582588b3/copy/0.log" Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.543707 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.651301 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/77ab2cd8-390f-45e9-bf66-d147582588b3-must-gather-output\") pod \"77ab2cd8-390f-45e9-bf66-d147582588b3\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.651397 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhfdp\" (UniqueName: \"kubernetes.io/projected/77ab2cd8-390f-45e9-bf66-d147582588b3-kube-api-access-xhfdp\") pod \"77ab2cd8-390f-45e9-bf66-d147582588b3\" (UID: \"77ab2cd8-390f-45e9-bf66-d147582588b3\") " Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.660997 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77ab2cd8-390f-45e9-bf66-d147582588b3-kube-api-access-xhfdp" (OuterVolumeSpecName: "kube-api-access-xhfdp") pod "77ab2cd8-390f-45e9-bf66-d147582588b3" (UID: "77ab2cd8-390f-45e9-bf66-d147582588b3"). InnerVolumeSpecName "kube-api-access-xhfdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.753473 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhfdp\" (UniqueName: \"kubernetes.io/projected/77ab2cd8-390f-45e9-bf66-d147582588b3-kube-api-access-xhfdp\") on node \"crc\" DevicePath \"\"" Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.791455 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77ab2cd8-390f-45e9-bf66-d147582588b3-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "77ab2cd8-390f-45e9-bf66-d147582588b3" (UID: "77ab2cd8-390f-45e9-bf66-d147582588b3"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.855489 4689 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/77ab2cd8-390f-45e9-bf66-d147582588b3-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 13 22:19:15 crc kubenswrapper[4689]: I1013 22:19:15.877188 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" path="/var/lib/kubelet/pods/77ab2cd8-390f-45e9-bf66-d147582588b3/volumes" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.004524 4689 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-whcsp_must-gather-rwg5q_77ab2cd8-390f-45e9-bf66-d147582588b3/copy/0.log" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.005292 4689 generic.go:334] "Generic (PLEG): container finished" podID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerID="7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db" exitCode=143 Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.005347 4689 scope.go:117] "RemoveContainer" containerID="7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.005561 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whcsp/must-gather-rwg5q" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.053103 4689 scope.go:117] "RemoveContainer" containerID="c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.136801 4689 scope.go:117] "RemoveContainer" containerID="7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db" Oct 13 22:19:16 crc kubenswrapper[4689]: E1013 22:19:16.140144 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db\": container with ID starting with 7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db not found: ID does not exist" containerID="7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.140191 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db"} err="failed to get container status \"7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db\": rpc error: code = NotFound desc = could not find container \"7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db\": container with ID starting with 7463865f2126186595a56015c5c6aba5afed974f8e7634a72676af613fcb70db not found: ID does not exist" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.140217 4689 scope.go:117] "RemoveContainer" containerID="c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d" Oct 13 22:19:16 crc kubenswrapper[4689]: E1013 22:19:16.142501 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d\": container with ID starting with c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d not found: ID does not exist" containerID="c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d" Oct 13 22:19:16 crc kubenswrapper[4689]: I1013 22:19:16.142545 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d"} err="failed to get container status \"c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d\": rpc error: code = NotFound desc = could not find container \"c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d\": container with ID starting with c4c4538bcb9a0268088f591f8b9cb1fa6daac4d0fc6f127635e7a9ed29622f6d not found: ID does not exist" Oct 13 22:19:17 crc kubenswrapper[4689]: I1013 22:19:17.867772 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:19:17 crc kubenswrapper[4689]: E1013 22:19:17.868095 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:19:28 crc kubenswrapper[4689]: I1013 22:19:28.868039 4689 scope.go:117] "RemoveContainer" 
containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:19:28 crc kubenswrapper[4689]: E1013 22:19:28.868852 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:19:40 crc kubenswrapper[4689]: I1013 22:19:40.867730 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:19:40 crc kubenswrapper[4689]: E1013 22:19:40.868531 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.994094 4689 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b5jqh"] Oct 13 22:19:48 crc kubenswrapper[4689]: E1013 22:19:48.995090 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerName="extract-utilities" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995103 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerName="extract-utilities" Oct 13 22:19:48 crc kubenswrapper[4689]: E1013 22:19:48.995123 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerName="registry-server" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995129 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerName="registry-server" Oct 13 22:19:48 crc kubenswrapper[4689]: E1013 22:19:48.995144 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerName="gather" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995150 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerName="gather" Oct 13 22:19:48 crc kubenswrapper[4689]: E1013 22:19:48.995162 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerName="copy" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995167 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerName="copy" Oct 13 22:19:48 crc kubenswrapper[4689]: E1013 22:19:48.995188 4689 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerName="extract-content" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995194 4689 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" containerName="extract-content" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995384 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9fa48e7-4200-481f-92b2-b6450c25cd23" 
containerName="registry-server" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995402 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerName="gather" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.995414 4689 memory_manager.go:354] "RemoveStaleState removing state" podUID="77ab2cd8-390f-45e9-bf66-d147582588b3" containerName="copy" Oct 13 22:19:48 crc kubenswrapper[4689]: I1013 22:19:48.997037 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.006765 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b5jqh"] Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.040780 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-catalog-content\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.040917 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjjh6\" (UniqueName: \"kubernetes.io/projected/73d031d3-cbe1-4c52-a82a-9194f583c550-kube-api-access-zjjh6\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.040986 4689 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-utilities\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.142211 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-catalog-content\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.142315 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjjh6\" (UniqueName: \"kubernetes.io/projected/73d031d3-cbe1-4c52-a82a-9194f583c550-kube-api-access-zjjh6\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.142701 4689 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-utilities\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.142755 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-catalog-content\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " 
pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.142916 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-utilities\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.171993 4689 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjjh6\" (UniqueName: \"kubernetes.io/projected/73d031d3-cbe1-4c52-a82a-9194f583c550-kube-api-access-zjjh6\") pod \"redhat-operators-b5jqh\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.327990 4689 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:49 crc kubenswrapper[4689]: I1013 22:19:49.810565 4689 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b5jqh"] Oct 13 22:19:50 crc kubenswrapper[4689]: I1013 22:19:50.346025 4689 generic.go:334] "Generic (PLEG): container finished" podID="73d031d3-cbe1-4c52-a82a-9194f583c550" containerID="a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563" exitCode=0 Oct 13 22:19:50 crc kubenswrapper[4689]: I1013 22:19:50.346072 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5jqh" event={"ID":"73d031d3-cbe1-4c52-a82a-9194f583c550","Type":"ContainerDied","Data":"a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563"} Oct 13 22:19:50 crc kubenswrapper[4689]: I1013 22:19:50.346132 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5jqh" event={"ID":"73d031d3-cbe1-4c52-a82a-9194f583c550","Type":"ContainerStarted","Data":"b7d8ce274a571d26223310fcb068626ab3ae4fa4d9ec434ea233104bcb162228"} Oct 13 22:19:50 crc kubenswrapper[4689]: I1013 22:19:50.347741 4689 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 13 22:19:52 crc kubenswrapper[4689]: I1013 22:19:52.372945 4689 generic.go:334] "Generic (PLEG): container finished" podID="73d031d3-cbe1-4c52-a82a-9194f583c550" containerID="295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef" exitCode=0 Oct 13 22:19:52 crc kubenswrapper[4689]: I1013 22:19:52.373051 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5jqh" event={"ID":"73d031d3-cbe1-4c52-a82a-9194f583c550","Type":"ContainerDied","Data":"295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef"} Oct 13 22:19:53 crc kubenswrapper[4689]: I1013 22:19:53.383836 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5jqh" event={"ID":"73d031d3-cbe1-4c52-a82a-9194f583c550","Type":"ContainerStarted","Data":"acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a"} Oct 13 22:19:53 crc kubenswrapper[4689]: I1013 22:19:53.874445 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:19:53 crc kubenswrapper[4689]: E1013 22:19:53.874772 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:19:59 crc kubenswrapper[4689]: I1013 22:19:59.328191 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:59 crc kubenswrapper[4689]: I1013 22:19:59.329857 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:59 crc kubenswrapper[4689]: I1013 22:19:59.372172 4689 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:59 crc kubenswrapper[4689]: I1013 22:19:59.392649 4689 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b5jqh" podStartSLOduration=8.900129585 podStartE2EDuration="11.392628724s" podCreationTimestamp="2025-10-13 22:19:48 +0000 UTC" firstStartedPulling="2025-10-13 22:19:50.347442886 +0000 UTC m=+4107.265687971" lastFinishedPulling="2025-10-13 22:19:52.839942025 +0000 UTC m=+4109.758187110" observedRunningTime="2025-10-13 22:19:53.40267251 +0000 UTC m=+4110.320917595" watchObservedRunningTime="2025-10-13 22:19:59.392628724 +0000 UTC m=+4116.310873809" Oct 13 22:19:59 crc kubenswrapper[4689]: I1013 22:19:59.483331 4689 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:19:59 crc kubenswrapper[4689]: I1013 22:19:59.603629 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b5jqh"] Oct 13 22:20:01 crc kubenswrapper[4689]: I1013 22:20:01.459057 4689 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b5jqh" podUID="73d031d3-cbe1-4c52-a82a-9194f583c550" containerName="registry-server" containerID="cri-o://acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a" gracePeriod=2 Oct 13 22:20:01 crc kubenswrapper[4689]: I1013 22:20:01.927811 4689 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.083150 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjjh6\" (UniqueName: \"kubernetes.io/projected/73d031d3-cbe1-4c52-a82a-9194f583c550-kube-api-access-zjjh6\") pod \"73d031d3-cbe1-4c52-a82a-9194f583c550\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.083297 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-catalog-content\") pod \"73d031d3-cbe1-4c52-a82a-9194f583c550\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.083376 4689 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-utilities\") pod \"73d031d3-cbe1-4c52-a82a-9194f583c550\" (UID: \"73d031d3-cbe1-4c52-a82a-9194f583c550\") " Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.084734 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-utilities" (OuterVolumeSpecName: "utilities") pod "73d031d3-cbe1-4c52-a82a-9194f583c550" (UID: "73d031d3-cbe1-4c52-a82a-9194f583c550"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.089761 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73d031d3-cbe1-4c52-a82a-9194f583c550-kube-api-access-zjjh6" (OuterVolumeSpecName: "kube-api-access-zjjh6") pod "73d031d3-cbe1-4c52-a82a-9194f583c550" (UID: "73d031d3-cbe1-4c52-a82a-9194f583c550"). InnerVolumeSpecName "kube-api-access-zjjh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.185999 4689 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjjh6\" (UniqueName: \"kubernetes.io/projected/73d031d3-cbe1-4c52-a82a-9194f583c550-kube-api-access-zjjh6\") on node \"crc\" DevicePath \"\"" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.186053 4689 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-utilities\") on node \"crc\" DevicePath \"\"" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.216707 4689 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "73d031d3-cbe1-4c52-a82a-9194f583c550" (UID: "73d031d3-cbe1-4c52-a82a-9194f583c550"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.288167 4689 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73d031d3-cbe1-4c52-a82a-9194f583c550-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.469120 4689 generic.go:334] "Generic (PLEG): container finished" podID="73d031d3-cbe1-4c52-a82a-9194f583c550" containerID="acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a" exitCode=0 Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.469170 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5jqh" event={"ID":"73d031d3-cbe1-4c52-a82a-9194f583c550","Type":"ContainerDied","Data":"acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a"} Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.469200 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b5jqh" event={"ID":"73d031d3-cbe1-4c52-a82a-9194f583c550","Type":"ContainerDied","Data":"b7d8ce274a571d26223310fcb068626ab3ae4fa4d9ec434ea233104bcb162228"} Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.469221 4689 scope.go:117] "RemoveContainer" containerID="acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.469237 4689 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b5jqh" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.491864 4689 scope.go:117] "RemoveContainer" containerID="295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.506076 4689 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b5jqh"] Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.517513 4689 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b5jqh"] Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.531936 4689 scope.go:117] "RemoveContainer" containerID="a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.569970 4689 scope.go:117] "RemoveContainer" containerID="acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a" Oct 13 22:20:02 crc kubenswrapper[4689]: E1013 22:20:02.570349 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a\": container with ID starting with acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a not found: ID does not exist" containerID="acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.570382 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a"} err="failed to get container status \"acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a\": rpc error: code = NotFound desc = could not find container \"acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a\": container with ID starting with acbaedc967a4a11b809e04cb4945fc5c65db55f086d140fa92794854605a524a not found: ID does not exist" Oct 13 22:20:02 crc 
kubenswrapper[4689]: I1013 22:20:02.570406 4689 scope.go:117] "RemoveContainer" containerID="295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef" Oct 13 22:20:02 crc kubenswrapper[4689]: E1013 22:20:02.570659 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef\": container with ID starting with 295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef not found: ID does not exist" containerID="295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.570681 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef"} err="failed to get container status \"295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef\": rpc error: code = NotFound desc = could not find container \"295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef\": container with ID starting with 295764ad3ec7c77cda5574eff433e7c47ffce9e93788ddde9243ce229c6b64ef not found: ID does not exist" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.570696 4689 scope.go:117] "RemoveContainer" containerID="a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563" Oct 13 22:20:02 crc kubenswrapper[4689]: E1013 22:20:02.570950 4689 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563\": container with ID starting with a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563 not found: ID does not exist" containerID="a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563" Oct 13 22:20:02 crc kubenswrapper[4689]: I1013 22:20:02.570969 4689 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563"} err="failed to get container status \"a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563\": rpc error: code = NotFound desc = could not find container \"a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563\": container with ID starting with a6dda8b34ee7914c6cca769b80194e09d28d752bf6ae9ab9640509b89fe86563 not found: ID does not exist" Oct 13 22:20:03 crc kubenswrapper[4689]: I1013 22:20:03.877945 4689 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73d031d3-cbe1-4c52-a82a-9194f583c550" path="/var/lib/kubelet/pods/73d031d3-cbe1-4c52-a82a-9194f583c550/volumes" Oct 13 22:20:05 crc kubenswrapper[4689]: I1013 22:20:05.871699 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" Oct 13 22:20:05 crc kubenswrapper[4689]: E1013 22:20:05.871950 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781" Oct 13 22:20:18 crc kubenswrapper[4689]: I1013 22:20:18.867433 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d" 
Oct 13 22:20:18 crc kubenswrapper[4689]: E1013 22:20:18.868251 4689 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-w5fqm_openshift-machine-config-operator(1863da92-265f-451e-a741-a184c8d3f781)\"" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" podUID="1863da92-265f-451e-a741-a184c8d3f781"
Oct 13 22:20:31 crc kubenswrapper[4689]: I1013 22:20:31.868249 4689 scope.go:117] "RemoveContainer" containerID="b7ad49f3e47500341c53cfe2b9dc257cb79463bdc21fe9034b9059d253f2853d"
Oct 13 22:20:32 crc kubenswrapper[4689]: I1013 22:20:32.760027 4689 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-w5fqm" event={"ID":"1863da92-265f-451e-a741-a184c8d3f781","Type":"ContainerStarted","Data":"90f6eeb7c77e94de3410385cf374157a164fd892b6e76dee28993d59b284b42e"}